diff --git a/.changesets/breaking_lb_next_apollo_otel_default.md b/.changesets/breaking_lb_next_apollo_otel_default.md new file mode 100644 index 0000000000..ae3c4a3d19 --- /dev/null +++ b/.changesets/breaking_lb_next_apollo_otel_default.md @@ -0,0 +1,6 @@ +### Default to Apollo reporting over OTel ([PR #5741](https://github.com/apollographql/router/pull/5741)) + +BREAKING CHANGE: This change will cause traces in Apollo Studio to appear differently. + + +By [@lennyburdette](https://github.com/lennyburdette) in https://github.com/apollographql/router/pull/5741 \ No newline at end of file diff --git a/.changesets/config_apollo_reporting_defaults.md b/.changesets/config_apollo_reporting_defaults.md new file mode 100644 index 0000000000..7e3457098b --- /dev/null +++ b/.changesets/config_apollo_reporting_defaults.md @@ -0,0 +1,7 @@ +### Changed Apollo reporting defaults ([PR #5975](https://github.com/apollographql/router/pull/5975)) + +Changed the default values for Apollo metrics reporting. +* `telemetry.apollo.signature_normalization_algorithm` now defaults to `enhanced` +* `telemetry.apollo.metrics_reference_mode` now defaults to `extended` + +By [@bonnici](https://github.com/bonnici) in https://github.com/apollographql/router/pull/5975 diff --git a/.changesets/config_garypen_promote_otel.md b/.changesets/config_garypen_promote_otel.md new file mode 100644 index 0000000000..2bab047c74 --- /dev/null +++ b/.changesets/config_garypen_promote_otel.md @@ -0,0 +1,11 @@ +### Promote experimental_otlp_tracing_sampler from experimental ([PR #6070](https://github.com/apollographql/router/pull/6070)) + +The router's otlp tracing sampler feature that was previously [experimental](https://www.apollographql.com/docs/resources/product-launch-stages/#experimental-features) is now [generally available](https://www.apollographql.com/docs/resources/product-launch-stages/#general-availability). 
+ +If you used its experimental configuration, you should migrate to the new configuration option: + +* `telemetry.apollo.experimental_otlp_tracing_sampler` is now `telemetry.apollo.otlp_tracing_sampler` + +The experimental configuration option is now deprecated. It remains functional but will log warnings. + +By [@garypen](https://github.com/garypen) in https://github.com/apollographql/router/pull/6070 diff --git a/.changesets/fix_error_locations.md b/.changesets/fix_error_locations.md new file mode 100644 index 0000000000..f975216bab --- /dev/null +++ b/.changesets/fix_error_locations.md @@ -0,0 +1,33 @@ +### Gracefully handle subgraph response with `-1` values inside error locations ([PR #5633](https://github.com/apollographql/router/pull/5633)) + +[The GraphQL specification requires](https://spec.graphql.org/draft/#sel-GAPHRPFCCaCGX5zM) that both "line" and "column" are positive numbers. +However, GraphQL Java and GraphQL Kotlin use a `{ "line": -1, "column": -1 }` value if they can't determine the error location inside a query. + +This change makes the Router gracefully handle such responses by ignoring such invalid locations. 
+ +As an example, if a subgraph responds with: +```json +{ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "locations": [ + { "line": -1, "column": -1 }, + ], + "path":["topProducts"] + }] +} +``` + +the Router will return the following to a client: +```json +{ + "data": { "topProducts": null }, + "errors": [{ + "message":"Some error on subgraph", + "path":["topProducts"] + }] +} +``` + +By [@IvanGoncharov](https://github.com/IvanGoncharov) in https://github.com/apollographql/router/pull/5633 \ No newline at end of file diff --git a/.changesets/maint_renee_migrate_metrics_histograms.md b/.changesets/maint_renee_migrate_metrics_histograms.md new file mode 100644 index 0000000000..b78b99d872 --- /dev/null +++ b/.changesets/maint_renee_migrate_metrics_histograms.md @@ -0,0 +1,5 @@ +### Migrate histogram metrics to `{f,u}64_histogram!` ([PR #6356](https://github.com/apollographql/router/pull/6356)) + +Updates histogram metrics using the legacy `tracing::info!(histogram.*)` syntax to the new metrics macros. + +By [@goto-bus-stop](https://github.com/goto-bus-stop) in https://github.com/apollographql/router/pull/6356 \ No newline at end of file diff --git a/.changesets/maint_renee_migrate_metrics_values.md b/.changesets/maint_renee_migrate_metrics_values.md index 19ab1e9815..2bb0ee2e23 100644 --- a/.changesets/maint_renee_migrate_metrics_values.md +++ b/.changesets/maint_renee_migrate_metrics_values.md @@ -1,5 +1,5 @@ -### Migrate various metrics to OTel instruments ([PR #6476](https://github.com/apollographql/router/pull/6476), [PR #6356](https://github.com/apollographql/router/pull/6356), [PR #6539](https://github.com/apollographql/router/pull/6539)) +### Migrate gauge metrics to OTel instruments ([PR #6476](https://github.com/apollographql/router/pull/6476)) -Various metrics using our legacy mechanism based on the `tracing` crate are migrated to OTel instruments. 
+Updates gauge metrics using the legacy `tracing::info!(value.*)` syntax to OTel instruments. -By [@goto-bus-stop](https://github.com/goto-bus-stop) in https://github.com/apollographql/router/pull/6476, https://github.com/apollographql/router/pull/6356, https://github.com/apollographql/router/pull/6539 +By [@goto-bus-stop](https://github.com/goto-bus-stop) in https://github.com/apollographql/router/pull/6476 \ No newline at end of file diff --git a/.circleci/config.yml b/.circleci/config.yml index 38cf25ba1e..167ee409ae 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -12,34 +12,34 @@ orbs: executors: amd_linux_build: &amd_linux_build_executor docker: - - image: cimg/base:stable + - image: cimg/base:current resource_class: xlarge environment: CARGO_BUILD_JOBS: 4 RUST_TEST_THREADS: 6 amd_linux_helm: &amd_linux_helm_executor docker: - - image: cimg/base:stable + - image: cimg/base:current resource_class: small amd_linux_test: &amd_linux_test_executor docker: - - image: cimg/base:stable + - image: cimg/base:current - image: cimg/redis:7.4.1 - image: jaegertracing/all-in-one:1.54.0 - image: openzipkin/zipkin:3.4.3 - image: ghcr.io/datadog/dd-apm-test-agent/ddapm-test-agent:v1.17.0 - resource_class: xlarge + resource_class: 2xlarge environment: CARGO_BUILD_JOBS: 4 arm_linux_build: &arm_linux_build_executor - machine: - image: ubuntu-2004:2024.01.1 + docker: + - image: cimg/base:current resource_class: arm.large environment: CARGO_BUILD_JOBS: 8 arm_linux_test: &arm_linux_test_executor - machine: - image: ubuntu-2004:2024.01.1 + docker: + - image: cimg/base:current resource_class: arm.xlarge environment: CARGO_BUILD_JOBS: 8 @@ -82,6 +82,10 @@ parameters: protoc_version: type: string default: "21.8" + # note the cmake version is only used for manual installs, not for installs from a package manager like apt or homebrew + cmake_version: + type: string + default: "3.31.1" nightly: type: boolean default: false @@ -233,9 +237,29 @@ commands: command: | if [[ ! 
-d "$HOME/.deb" ]]; then mkdir $HOME/.deb - sudo apt-get --download-only -o Dir::Cache="$HOME/.deb" -o Dir::Cache::archives="$HOME/.deb" install libssl-dev libdw-dev + sudo apt-get update + sudo apt-get --download-only -o Dir::Cache="$HOME/.deb" -o Dir::Cache::archives="$HOME/.deb" install build-essential libssl-dev libdw-dev fi sudo dpkg -i $HOME/.deb/*.deb + + - when: + condition: + or: + - equal: [ *windows_build_executor, << parameters.platform >> ] + - equal: [ *windows_test_executor, << parameters.platform >> ] + steps: + - run: + name: Install CMake + command: | + mkdir -p "$HOME/.local" + if [[ ! -f "$HOME/.local/bin/cmake" ]]; then + curl -L https://github.com/Kitware/CMake/releases/download/v<< pipeline.parameters.cmake_version >>/cmake-<< pipeline.parameters.cmake_version >>-windows-x86_64.zip --output cmake.zip + # The zip file has a root directory, so we put it somewhere else first before placing the files in .local + unzip cmake.zip -d /tmp > /dev/null + cp /tmp/cmake-<< pipeline.parameters.cmake_version >>-windows-x86_64/* -R "$HOME/.local" + fi + + cmake --version install_protoc: parameters: platform: @@ -483,7 +507,11 @@ commands: environment: # Use the settings from the "ci" profile in nextest configuration. 
NEXTEST_PROFILE: ci - command: xtask test --workspace --locked --features ci,hyper_header_limits + # Temporarily disable lib backtrace since it crashes on macOS + # TODO: remove this workaround once we update to Xcode >= 15.1.0 + # See: https://github.com/apollographql/router/pull/5462 + RUST_LIB_BACKTRACE: 0 + command: xtask test --workspace --locked --features ci,snapshot - run: name: Delete large files from cache command: | @@ -696,10 +724,10 @@ jobs: - run: cargo xtask release prepare nightly - run: command: > - cargo xtask dist --target aarch64-apple-darwin --features hyper_header_limits + cargo xtask dist --target aarch64-apple-darwin - run: command: > - cargo xtask dist --target x86_64-apple-darwin --features hyper_header_limits + cargo xtask dist --target x86_64-apple-darwin - run: command: > mkdir -p artifacts @@ -759,7 +787,7 @@ jobs: - run: cargo xtask release prepare nightly - run: command: > - cargo xtask dist --features hyper_header_limits + cargo xtask dist - run: command: > mkdir -p artifacts @@ -902,7 +930,7 @@ jobs: publish_github_release: docker: - - image: cimg/base:stable + - image: cimg/base:current resource_class: small environment: <<: *common_job_environment @@ -1192,3 +1220,4 @@ workflows: - secops-oidc - github-orb git-base-revision: <<#pipeline.git.base_revision>><><> + disabled-signatures: "rules.providers.semgrep.security.javascript.lang.security.detect-insecure-websocket" diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS index 6d9f87ef81..8730fff398 100644 --- a/.github/CODEOWNERS +++ b/.github/CODEOWNERS @@ -1,8 +1,9 @@ /docs/ @apollographql/docs /.changesets/ @apollographql/docs /apollo-federation/ @dariuszkuc @sachindshinde @goto-bus-stop @SimonSapin @lrlna @TylerBloom @duckki -/apollo-federation/src/sources/connect/json_selection @benjamn +/apollo-federation/src/sources/connect @apollographql/connectors /apollo-router/ @apollographql/polaris @apollographql/atlas +/apollo-router/src/plugins/connectors @apollographql/connectors 
/apollo-router-benchmarks/ @apollographql/polaris @apollographql/atlas /apollo-router-scaffold/ @apollographql/polaris @apollographql/atlas /examples/ @apollographql/polaris @apollographql/atlas diff --git a/CHANGELOG.md b/CHANGELOG.md index 3c0e4a4608..6c8dd1a1a4 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,47 @@ All notable changes to Router will be documented in this file. This project adheres to [Semantic Versioning v2.0.0](https://semver.org/spec/v2.0.0.html). +# [2.0.0-preview.0] - 2024-10-01 + +Learn more about [migrating from 1.x to 2.0](https://www.apollographql.com/docs/graphos/reference/migration/from-router-v1). + +## 🚀 Features + +### Apollo Connectors Public Preview + +[Apollo Connectors](https://go.apollo.dev/connectors) are a new declarative programming model for GraphQL, allowing you to plug your existing REST services directly into your graph. + +```graphql +type Query { + posts(first: Int): [Post] + @connect( + http: { GET: "https://my.api/posts?limit={$args.first}" } + selection: "$.results { id title body }" + ) +} +``` + +Apollo Connectors are available for Enterprise and free GraphOS Trial accounts. Get started with the [Connectors Quickstart](https://go.apollo.dev/connectors/quickstart) and visit the ["connectors" tag on the community forums](https://community.apollographql.com/tag/connectors) to leave feedback during the preview. + +### Apollo operation usage reporting via OTLP + +The router supports reporting operation usage metrics to GraphOS via OpenTelemetry Protocol (OTLP). + +Prior to version 1.49.0 of the router, all GraphOS reporting was performed using a private tracing format. In v1.49.0, we introduced support for using OTEL to perform this reporting. In v1.x, this is controlled using the experimental_otlp_tracing_sampler flag, and it's disabled by default. + +Now in v2.x, this flag is renamed to otlp_tracing_sampler, and it's enabled by default. 
+ +Learn more about configuring [usage reporting via OTLP](https://www.apollographql.com/docs/graphos/routing/graphos-reporting#usage-reporting-via-opentelemetry-protocol-otlp). + +## 📃 Configuration + +### Metrics reporting defaults + +Default values of some GraphOS reporting metrics have been changed from v1.x to the following in v2.x: + +* `telemetry.apollo.signature_normalization_algorithm` now defaults to `enhanced`. (In v1.x the default is `legacy`.) +* `telemetry.apollo.metrics_reference_mode` now defaults to `extended`. (In v1.x the default is `standard`.) + # [1.59.1] - 2025-01-08 ## 🐛 Fixes @@ -321,7 +362,6 @@ The deprecated metrics will continue to work in the 1.x release line. By [@goto-bus-stop](https://github.com/goto-bus-stop) in https://github.com/apollographql/router/pull/6350 - # [1.58.1] - 2024-12-05 > [!IMPORTANT] @@ -344,8 +384,6 @@ The native query planner now correctly sets two experimental configuration optio By [@goto-bus-stop](https://github.com/goto-bus-stop) in https://github.com/apollographql/router/pull/6316 - - # [1.58.0] - 2024-11-27 > [!IMPORTANT] @@ -652,8 +690,6 @@ This fixes an issue in progressive override where the override labels were not t By [@Geal](https://github.com/Geal) in https://github.com/apollographql/router/pull/6108 - - # [1.57.0] - 2024-10-22 > [!IMPORTANT] diff --git a/Cargo.lock b/Cargo.lock index b8e7a7a518..25882e6826 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -30,26 +30,26 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] name = "addr2line" -version = "0.22.0" +version = "0.24.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6e4503c46a5c0c7844e948c9a4d6acd9f50cccb4de1c48eb9e291ea17470c678" +checksum = "dfbe277e56a376000877090da837660b4427aad530e3028d44e0bffe4f89a1c1" dependencies = [ "gimli", ] [[package]] -name = "adler" -version = "1.0.2" +name = "adler2" +version 
= "2.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" +checksum = "512761e0bb2578dd7380c6baaa0f4ce03e84f95e960231d1dec8bf4d7d6e2627" [[package]] name = "ahash" @@ -92,9 +92,9 @@ dependencies = [ [[package]] name = "allocator-api2" -version = "0.2.18" +version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5c6cb57a04249c6480766f7f7cef5467412af1490f8d1e243141daddada3264f" +checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" [[package]] name = "android-tzdata" @@ -119,9 +119,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.6.14" +version = "0.6.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "418c75fa768af9c03be99d17643f93f79bbba589895012a80e3452a19ddda15b" +checksum = "8acc5369981196006228e28809f761875c0327210a891e941f4c683b3a99529b" dependencies = [ "anstyle", "anstyle-parse", @@ -134,43 +134,43 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.7" +version = "1.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "038dfcf04a5feb68e9c60b21c9625a54c2c0616e79b72b0fd87075a056ae1d1b" +checksum = "55cc3b69f167a1ef2e161439aa98aed94e6028e5f9a59be9a6ffb47aef1651f9" [[package]] name = "anstyle-parse" -version = "0.2.4" +version = "0.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c03a11a9034d92058ceb6ee011ce58af4a9bf61491aa7e1e59ecd24bd40d22d4" +checksum = "3b2d16507662817a6a20a9ea92df6652ee4f94f914589377d69f3b21bc5798a9" dependencies = [ "utf8parse", ] [[package]] name = "anstyle-query" -version = "1.1.0" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad186efb764318d35165f1758e7dcef3b10628e26d41a44bc5550652e6804391" +checksum = 
"79947af37f4177cfead1110013d678905c37501914fba0efea834c3fe9a8d60c" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anstyle-wincon" -version = "3.0.3" +version = "3.0.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61a38449feb7068f52bb06c12759005cf459ee52bb4adc1d5a7c4322d716fb19" +checksum = "2109dbce0e72be3ec00bed26e6a7479ca384ad226efdd66db8fa2e3a38c83125" dependencies = [ "anstyle", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "anyhow" -version = "1.0.86" +version = "1.0.94" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da" +checksum = "c1fd03a028ef38ba2276dce7e33fcd6369c158a1bca17946c4b1b701891c1ff7" [[package]] name = "apollo-compiler" @@ -181,11 +181,11 @@ dependencies = [ "ahash", "apollo-parser", "ariadne", - "indexmap 2.5.0", + "indexmap 2.7.0", "rowan", "serde", "serde_json_bytes", - "thiserror", + "thiserror 1.0.69", "triomphe", "typed-arena", "uuid", @@ -204,28 +204,36 @@ dependencies = [ [[package]] name = "apollo-federation" -version = "1.59.1" +version = "2.0.0-preview.4" dependencies = [ "apollo-compiler", "derive_more", "either", "hashbrown 0.15.2", "hex", - "indexmap 2.5.0", + "http 1.2.0", + "indexmap 2.7.0", "insta", "itertools 0.13.0", + "line-col", "multimap 0.10.0", "nom", + "nom_locate", + "once_cell", "petgraph", + "pretty_assertions", "regex", "ron", + "rstest", "serde", + "serde_json", "serde_json_bytes", "sha1", + "shape", "strum 0.26.3", "strum_macros 0.26.4", "tempfile", - "thiserror", + "thiserror 1.0.69", "time", "tracing", "url", @@ -251,12 +259,12 @@ checksum = "c8f05cbc7da3c2e3bb2f86e985aad5f72571d2e2cd26faf8caa7782131576f84" dependencies = [ "memchr", "rowan", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "apollo-router" -version = "1.59.1" +version = "2.0.0-preview.4" dependencies = [ "access-json", "ahash", @@ -276,7 +284,9 @@ 
dependencies = [ "aws-sigv4", "aws-smithy-runtime-api", "aws-types", - "axum", + "axum 0.8.1", + "axum-extra", + "axum-server", "base64 0.22.1", "basic-toml", "bloomfilter", @@ -297,6 +307,7 @@ dependencies = [ "displaydoc", "ecdsa", "flate2", + "form_urlencoded", "fred", "futures", "futures-test", @@ -306,14 +317,18 @@ dependencies = [ "hickory-resolver", "hmac", "http 0.2.12", - "http-body 0.4.6", - "http-serde", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "http-serde 1.1.3", + "http-serde 2.1.1", "humantime", "humantime-serde", - "hyper", - "hyper-rustls", + "hyper 1.5.1", + "hyper-rustls 0.27.3", + "hyper-util", "hyperlocal", - "indexmap 2.5.0", + "indexmap 2.7.0", "insta", "itertools 0.13.0", "itoa", @@ -330,7 +345,7 @@ dependencies = [ "memchr", "mime", "mockall", - "multer", + "multer 3.1.0", "multimap 0.9.1", "notify", "nu-ansi-term 0.50.1", @@ -353,6 +368,7 @@ dependencies = [ "parking_lot", "paste", "pin-project-lite", + "pretty_assertions", "prometheus", "prost 0.12.6", "prost-types 0.12.6", @@ -360,14 +376,16 @@ dependencies = [ "rand 0.8.5", "rand_core 0.6.4", "regex", - "reqwest", + "reqwest 0.11.27", + "reqwest 0.12.9", "rhai", "rmp", "rstack", + "rstest", "rust-embed", - "rustls", - "rustls-native-certs", - "rustls-pemfile", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", + "rustls-pemfile 2.2.0", "ryu", "schemars", "semver", @@ -388,17 +406,18 @@ dependencies = [ "sysinfo", "tempfile", "test-log", - "thiserror", + "thiserror 1.0.69", "tikv-jemallocator", "time", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.1", "tokio-stream", - "tokio-tungstenite", + "tokio-tungstenite 0.26.1", "tokio-util", + "tonic 0.12.3", "tonic 0.9.2", "tonic-build", - "tower", + "tower 0.5.2", "tower-http", "tower-service", "tower-test", @@ -406,7 +425,7 @@ dependencies = [ "tracing-core", "tracing-futures", "tracing-opentelemetry", - "tracing-serde", + "tracing-serde 0.1.3", "tracing-subscriber", "tracing-test", "uname", @@ -423,7 +442,7 @@ dependencies 
= [ [[package]] name = "apollo-router-benchmarks" -version = "1.59.1" +version = "2.0.0-preview.4" dependencies = [ "apollo-parser", "apollo-router", @@ -434,38 +453,7 @@ dependencies = [ "once_cell", "serde_json", "tokio", - "tower", -] - -[[package]] -name = "apollo-router-scaffold" -version = "1.59.1" -dependencies = [ - "anyhow", - "cargo-scaffold", - "clap", - "copy_dir", - "dircmp", - "regex", - "similar", - "str_inflector", - "tempfile", - "toml", -] - -[[package]] -name = "apollo-router-scaffold-test" -version = "0.1.0" -dependencies = [ - "anyhow", - "apollo-router", - "async-trait", - "schemars", - "serde", - "serde_json", - "tokio", - "tower", - "tracing", + "tower 0.5.2", ] [[package]] @@ -477,16 +465,16 @@ dependencies = [ "apollo-compiler", "apollo-parser", "arbitrary", - "indexmap 2.5.0", + "indexmap 2.7.0", "once_cell", - "thiserror", + "thiserror 1.0.69", ] [[package]] name = "arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7d5a26814d8dcb93b0e5a0ff3c6d80a8843bafb21b39e8e18a6f05471870e110" +checksum = "dde20b3d026af13f561bdd0f15edf01fc734f0dafcedbaf42bba506a9517f223" dependencies = [ "derive_arbitrary", ] @@ -508,12 +496,6 @@ dependencies = [ "yansi", ] -[[package]] -name = "ascii" -version = "0.9.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eab1c04a571841102f5345a8fc0f6bb3d31c315dec879b5c6e42e40ce7ffa34e" - [[package]] name = "ascii_utils" version = "0.9.3" @@ -537,13 +519,13 @@ dependencies = [ "anyhow", "apollo-router", "async-trait", - "http 0.2.12", + "http 1.2.0", "schemars", "serde", "serde_json", "serde_json_bytes", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -571,28 +553,30 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.11" +version = "0.4.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd066d0b4ef8ecb03a55319dc13aa6910616d0f44008a045bb1835af830abff5" 
+checksum = "df895a515f70646414f4b45c0b79082783b80552b373a68283012928df56f522" dependencies = [ - "brotli 6.0.0", + "brotli 7.0.0", "flate2", "futures-core", "memchr", "pin-project-lite", "tokio", + "zstd", + "zstd-safe", ] [[package]] name = "async-executor" -version = "1.13.0" +version = "1.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7ebdfa2ebdab6b1760375fa7d6f382b9f486eac35fc994625a00e89280bdbb7" +checksum = "30ca9a001c1e8ba5149f91a74362376cc6bc5b919d92d988668657bd570bdcec" dependencies = [ "async-task", "concurrent-queue", - "fastrand 2.1.0", - "futures-lite 2.3.0", + "fastrand 2.3.0", + "futures-lite 2.5.0", "slab", ] @@ -604,10 +588,10 @@ checksum = "05b1b633a2115cd122d73b955eadd9916c18c8f510ec9cd1686404c60ad1c29c" dependencies = [ "async-channel 2.3.1", "async-executor", - "async-io 2.3.3", - "async-lock 3.4.0", + "async-io", + "async-lock", "blocking", - "futures-lite 2.3.0", + "futures-lite 2.5.0", "once_cell", ] @@ -627,11 +611,11 @@ dependencies = [ "fast_chemail", "fnv", "futures-util", - "handlebars 4.5.0", + "handlebars", "http 0.2.12", - "indexmap 2.5.0", + "indexmap 2.7.0", "mime", - "multer", + "multer 2.1.0", "num-traits", "once_cell", "pin-project-lite", @@ -641,7 +625,7 @@ dependencies = [ "serde_urlencoded", "static_assertions", "tempfile", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -652,7 +636,7 @@ checksum = "01a1c20a2059bffbc95130715b23435a05168c518fba9709c81fa2a38eed990c" dependencies = [ "async-graphql", "async-trait", - "axum", + "axum 0.6.20", "bytes", "futures-util", "serde_json", @@ -671,12 +655,12 @@ dependencies = [ "Inflector", "async-graphql-parser", "darling", - "proc-macro-crate", + "proc-macro-crate 1.3.1", "proc-macro2", "quote", "strum 0.25.0", "syn 2.0.90", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -698,57 +682,28 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "323a5143f5bdd2030f45e3f2e0c821c9b1d36e79cf382129c64299c50a7f3750" 
dependencies = [ "bytes", - "indexmap 2.5.0", + "indexmap 2.7.0", "serde", "serde_json", ] [[package]] name = "async-io" -version = "1.13.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fc5b45d93ef0529756f812ca52e44c221b35341892d3dcc34132ac02f3dd2af" -dependencies = [ - "async-lock 2.8.0", - "autocfg", - "cfg-if", - "concurrent-queue", - "futures-lite 1.13.0", - "log", - "parking", - "polling 2.8.0", - "rustix 0.37.27", - "slab", - "socket2 0.4.10", - "waker-fn", -] - -[[package]] -name = "async-io" -version = "2.3.3" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d6baa8f0178795da0e71bc42c9e5d13261aac7ee549853162e66a241ba17964" +checksum = "43a2b323ccce0a1d90b449fd71f2a06ca7faa7c54c2751f06c9bd851fc061059" dependencies = [ - "async-lock 3.4.0", + "async-lock", "cfg-if", "concurrent-queue", "futures-io", - "futures-lite 2.3.0", + "futures-lite 2.5.0", "parking", - "polling 3.7.2", - "rustix 0.38.34", + "polling", + "rustix", "slab", "tracing", - "windows-sys 0.52.0", -] - -[[package]] -name = "async-lock" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "287272293e9d8c41773cec55e365490fe034813a2f172f502d6ddcf75b2f582b" -dependencies = [ - "event-listener 2.5.3", + "windows-sys 0.59.0", ] [[package]] @@ -764,55 +719,57 @@ dependencies = [ [[package]] name = "async-process" -version = "1.8.1" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea6438ba0a08d81529c69b36700fa2f95837bfe3e776ab39cde9c14d9149da88" +checksum = "63255f1dc2381611000436537bbedfe83183faa303a5a0edaf191edef06526bb" dependencies = [ - "async-io 1.13.0", - "async-lock 2.8.0", + "async-channel 2.3.1", + "async-io", + "async-lock", "async-signal", + "async-task", "blocking", "cfg-if", - "event-listener 3.1.0", - "futures-lite 1.13.0", - "rustix 0.38.34", - "windows-sys 0.48.0", + "event-listener 5.3.1", + "futures-lite 2.5.0", 
+ "rustix", + "tracing", ] [[package]] name = "async-signal" -version = "0.2.9" +version = "0.2.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfb3634b73397aa844481f814fad23bbf07fdb0eabec10f2eb95e58944b1ec32" +checksum = "637e00349800c0bdf8bfc21ebbc0b6524abea702b0da4168ac00d070d0c0b9f3" dependencies = [ - "async-io 2.3.3", - "async-lock 3.4.0", + "async-io", + "async-lock", "atomic-waker", "cfg-if", "futures-core", "futures-io", - "rustix 0.38.34", + "rustix", "signal-hook-registry", "slab", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "async-std" -version = "1.12.0" +version = "1.13.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62565bb4402e926b29953c785397c6dc0391b7b446e45008b0049eb43cec6f5d" +checksum = "c634475f29802fde2b8f0b505b1bd00dfe4df7d4a000f0b36f7671197d5c3615" dependencies = [ "async-channel 1.9.0", "async-global-executor", - "async-io 1.13.0", - "async-lock 2.8.0", + "async-io", + "async-lock", "async-process", "crossbeam-utils", "futures-channel", "futures-core", "futures-io", - "futures-lite 1.13.0", + "futures-lite 2.5.0", "gloo-timers", "kv-log-macro", "log", @@ -826,9 +783,9 @@ dependencies = [ [[package]] name = "async-stream" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd56dd203fef61ac097dd65721a419ddccb106b2d2b70ba60a6b529f03961a51" +checksum = "0b5a71a6f37880a80d1d7f19efd781e4b5de42c88f0722cc13bcb6cc2cfe8476" dependencies = [ "async-stream-impl", "futures-core", @@ -837,9 +794,9 @@ dependencies = [ [[package]] name = "async-stream-impl" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "16e62a023e7c117e27523144c5d2459f4397fcc3cab0085af8e2224f643a0193" +checksum = "c7c24de15d275a1ecfd47a380fb4d5ec9bfe0933f309ed5e705b775596a3574d" dependencies = [ "proc-macro2", "quote", @@ -854,9 +811,9 @@ checksum = 
"8b75356056920673b02621b35afd0f7dda9306d03c79a30f5c56c44cf256e3de" [[package]] name = "async-trait" -version = "0.1.85" +version = "0.1.83" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3f934833b4b7233644e5848f235df3f57ed8c80f1528a26c3dfa13d2147fa056" +checksum = "721cae7de5c34fbb2acd27e21e6d2cf7b886dce0c27388d46c4e6c47ea4318dd" dependencies = [ "proc-macro2", "quote", @@ -869,28 +826,17 @@ version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1505bd5d3d116872e7271a6d4e16d81d0c8570876c8de68093a09ac269d8aac0" -[[package]] -name = "auth-git2" -version = "0.5.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e51bd0e4592409df8631ca807716dc1e5caafae5d01ce0157c966c71c7e49c3c" -dependencies = [ - "dirs", - "git2", - "terminal-prompt", -] - [[package]] name = "autocfg" -version = "1.3.0" +version = "1.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0" +checksum = "ace50bade8e6234aa140d9a2f552bbee1db4d353f69b8217bc503490fc1a9f26" [[package]] name = "aws-config" -version = "1.5.4" +version = "1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "caf6cfe2881cb1fcbba9ae946fb9a6480d3b7a714ca84c74925014a89ef3387a" +checksum = "4e95816a168520d72c0e7680c405a5a8c1fb6a035b4bc4b9d7b0de8e1a941697" dependencies = [ "aws-credential-types", "aws-runtime", @@ -905,10 +851,9 @@ dependencies = [ "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.1.0", + "fastrand 2.3.0", "hex", "http 0.2.12", - "hyper", "ring", "time", "tokio", @@ -919,9 +864,9 @@ dependencies = [ [[package]] name = "aws-credential-types" -version = "1.2.0" +version = "1.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e16838e6c9e12125face1c1eff1343c75e3ff540de98ff7ebd61874a89bcfeb9" +checksum = 
"60e8f6b615cb5fc60a98132268508ad104310f0cfb25a1c22eee76efdf9154da" dependencies = [ "aws-smithy-async", "aws-smithy-runtime-api", @@ -929,21 +874,48 @@ dependencies = [ "zeroize", ] +[[package]] +name = "aws-lc-rs" +version = "1.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f47bb8cc16b669d267eeccf585aea077d0882f4777b1c1f740217885d6e6e5a3" +dependencies = [ + "aws-lc-sys", + "paste", + "zeroize", +] + +[[package]] +name = "aws-lc-sys" +version = "0.23.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2101df3813227bbaaaa0b04cd61c534c7954b22bd68d399b440be937dc63ff7" +dependencies = [ + "bindgen", + "cc", + "cmake", + "dunce", + "fs_extra", + "libc", + "paste", +] + [[package]] name = "aws-runtime" -version = "1.4.0" +version = "1.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f42c2d4218de4dcd890a109461e2f799a1a2ba3bcd2cde9af88360f5df9266c6" +checksum = "b5ac934720fbb46206292d2c75b57e67acfc56fe7dfd34fb9a02334af08409ea" dependencies = [ "aws-credential-types", "aws-sigv4", "aws-smithy-async", "aws-smithy-http", + "aws-smithy-runtime", "aws-smithy-runtime-api", "aws-smithy-types", "aws-types", "bytes", - "fastrand 2.1.0", + "fastrand 2.3.0", "http 0.2.12", "http-body 0.4.6", "once_cell", @@ -1022,9 +994,9 @@ dependencies = [ [[package]] name = "aws-sigv4" -version = "1.2.3" +version = "1.2.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5df1b0fa6be58efe9d4ccc257df0a53b89cd8909e86591a13ca54817c87517be" +checksum = "7d3820e0c08d0737872ff3c7c1f21ebbb6693d832312d6152bf18ef50a5471c2" dependencies = [ "aws-credential-types", "aws-smithy-http", @@ -1035,7 +1007,7 @@ dependencies = [ "hex", "hmac", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "once_cell", "percent-encoding", "sha2", @@ -1056,9 +1028,9 @@ dependencies = [ [[package]] name = "aws-smithy-http" -version = "0.60.9" +version = "0.60.11" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "d9cd0ae3d97daa0a2bf377a4d8e8e1362cae590c4a1aad0d40058ebca18eb91e" +checksum = "5c8bc3e8fdc6b8d07d976e301c02fe553f72a39b7a9fea820e023268467d7ab6" dependencies = [ "aws-smithy-runtime-api", "aws-smithy-types", @@ -1095,42 +1067,42 @@ dependencies = [ [[package]] name = "aws-smithy-runtime" -version = "1.6.3" +version = "1.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0abbf454960d0db2ad12684a1640120e7557294b0ff8e2f11236290a1b293225" +checksum = "9f20685047ca9d6f17b994a07f629c813f08b5bce65523e47124879e60103d45" dependencies = [ "aws-smithy-async", "aws-smithy-http", "aws-smithy-runtime-api", "aws-smithy-types", "bytes", - "fastrand 2.1.0", - "h2", + "fastrand 2.3.0", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "http-body 1.0.1", "httparse", - "hyper", - "hyper-rustls", + "hyper 0.14.31", + "hyper-rustls 0.24.2", "once_cell", "pin-project-lite", "pin-utils", - "rustls", + "rustls 0.21.12", "tokio", "tracing", ] [[package]] name = "aws-smithy-runtime-api" -version = "1.7.2" +version = "1.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e086682a53d3aa241192aa110fa8dfce98f2f5ac2ead0de84d41582c7e8fdb96" +checksum = "92165296a47a812b267b4f41032ff8069ab7ff783696d217f0994a0d7ab585cd" dependencies = [ "aws-smithy-async", "aws-smithy-types", "bytes", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "pin-project-lite", "tokio", "tracing", @@ -1139,16 +1111,16 @@ dependencies = [ [[package]] name = "aws-smithy-types" -version = "1.2.4" +version = "1.2.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "273dcdfd762fae3e1650b8024624e7cd50e484e37abdab73a7a706188ad34543" +checksum = "4fbd94a32b3a7d55d3806fe27d98d3ad393050439dd05eb53ece36ec5e3d3510" dependencies = [ "base64-simd", "bytes", "bytes-utils", "futures-core", "http 0.2.12", - "http 1.1.0", + "http 1.2.0", "http-body 0.4.6", "http-body 1.0.1", 
"http-body-util", @@ -1165,9 +1137,9 @@ dependencies = [ [[package]] name = "aws-smithy-xml" -version = "0.60.8" +version = "0.60.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d123fbc2a4adc3c301652ba8e149bf4bc1d1725affb9784eb20c953ace06bf55" +checksum = "ab0b0166827aa700d3dc519f72f8b3a91c35d0b8d042dc5d643a91e6f80648fc" dependencies = [ "xmlparser", ] @@ -1193,17 +1165,80 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf" dependencies = [ "async-trait", - "axum-core", + "axum-core 0.3.4", "base64 0.21.7", "bitflags 1.3.2", "bytes", "futures-util", - "headers", + "headers 0.3.9", "http 0.2.12", "http-body 0.4.6", - "hyper", + "hyper 0.14.31", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "serde_json", + "serde_path_to_error", + "serde_urlencoded", + "sha1", + "sync_wrapper 0.1.2", + "tokio", + "tokio-tungstenite 0.20.1", + "tower 0.4.13", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.7.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "edca88bc138befd0323b20752846e6587272d3b03b0343c8ea28a6f819e6e71f" +dependencies = [ + "async-trait", + "axum-core 0.4.5", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "itoa", + "matchit 0.7.3", + "memchr", + "mime", + "percent-encoding", + "pin-project-lite", + "rustversion", + "serde", + "sync_wrapper 1.0.2", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6d6fd624c75e18b3b4c6b9caf42b1afe24437daaee904069137d8bab077be8b8" +dependencies = [ + "axum-core 0.5.0", + "base64 0.22.1", + "bytes", + "form_urlencoded", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + 
"hyper 1.5.1", + "hyper-util", "itoa", - "matchit", + "matchit 0.8.4", "memchr", "mime", "percent-encoding", @@ -1214,12 +1249,13 @@ dependencies = [ "serde_path_to_error", "serde_urlencoded", "sha1", - "sync_wrapper", + "sync_wrapper 1.0.2", "tokio", - "tokio-tungstenite", - "tower", + "tokio-tungstenite 0.26.1", + "tower 0.5.2", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -1239,19 +1275,100 @@ dependencies = [ "tower-service", ] +[[package]] +name = "axum-core" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09f2bd6146b97ae3359fa0cc6d6b376d9539582c7b4220f041a33ec24c226199" +dependencies = [ + "async-trait", + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-core" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df1362f362fd16024ae199c1970ce98f9661bf5ef94b9808fee734bc3698b733" +dependencies = [ + "bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "rustversion", + "sync_wrapper 1.0.2", + "tower-layer", + "tower-service", + "tracing", +] + +[[package]] +name = "axum-extra" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "460fc6f625a1f7705c6cf62d0d070794e94668988b1c38111baeec177c715f7b" +dependencies = [ + "axum 0.8.1", + "axum-core 0.5.0", + "bytes", + "futures-util", + "headers 0.4.0", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "mime", + "pin-project-lite", + "serde", + "tower 0.5.2", + "tower-layer", + "tower-service", +] + +[[package]] +name = "axum-server" +version = "0.7.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56bac90848f6a9393ac03c63c640925c4b7c8ca21654de40d53f55964667c7d8" +dependencies = [ + 
"bytes", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.1", + "hyper-util", + "pin-project-lite", + "tokio", + "tower 0.4.13", + "tower-service", +] + [[package]] name = "backtrace" -version = "0.3.73" +version = "0.3.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5cc23269a4f8976d0a4d2e7109211a419fe30e8d88d677cd60b6bc79c5732e0a" +checksum = "8d82cb332cdfaed17ae235a638438ac4d4839913cc2af585c3c6746e8f8bee1a" dependencies = [ "addr2line", - "cc", "cfg-if", "libc", "miniz_oxide", "object", "rustc-demangle", + "windows-targets 0.52.6", ] [[package]] @@ -1303,6 +1420,29 @@ dependencies = [ "serde", ] +[[package]] +name = "bindgen" +version = "0.69.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088" +dependencies = [ + "bitflags 2.6.0", + "cexpr", + "clang-sys", + "itertools 0.12.1", + "lazy_static", + "lazycell", + "log", + "prettyplease", + "proc-macro2", + "quote", + "regex", + "rustc-hash 1.1.0", + "shlex", + "syn 2.0.90", + "which", +] + [[package]] name = "bit-set" version = "0.5.3" @@ -1357,15 +1497,15 @@ dependencies = [ "async-channel 2.3.1", "async-task", "futures-io", - "futures-lite 2.3.0", + "futures-lite 2.5.0", "piper", ] [[package]] name = "bloomfilter" -version = "1.0.14" +version = "1.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc0bdbcf2078e0ba8a74e1fe0cf36f54054a04485759b61dfd60b174658e9607" +checksum = "c541c70a910b485670304fd420f0eab8f7bde68439db6a8d98819c3d2774d7e2" dependencies = [ "bit-vec 0.7.0", "getrandom 0.2.15", @@ -1385,9 +1525,9 @@ dependencies = [ [[package]] name = "brotli" -version = "6.0.0" +version = "7.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "74f7971dbd9326d58187408ab83117d8ac1bb9c17b085fdacd1cf2f598719b6b" +checksum = "cc97b8f16f944bba54f0433f07e30be199b6dc2bd25937444bbad560bcea29bd" 
dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -1416,9 +1556,9 @@ dependencies = [ [[package]] name = "bstr" -version = "1.9.1" +version = "1.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "05efc5cfd9110c8416e471df0e96702d58690178e206e61b7173706673c93706" +checksum = "1a68f1f47cdf0ec8ee4b941b2eee2a80cb796db73118c0dd09ac63fbe405be22" dependencies = [ "memchr", "serde", @@ -1435,7 +1575,7 @@ dependencies = [ "quote", "str_inflector", "syn 2.0.90", - "thiserror", + "thiserror 1.0.69", "try_match", ] @@ -1459,9 +1599,9 @@ checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" -version = "1.6.1" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a12916984aab3fa6e39d655a33e09c0071eb36d6ab3aea5c2d78551f1df6d952" +checksum = "325918d6fe32f23b19878fe4b34794ae41fc19ddbe53b10571a4874d44ffd39b" dependencies = [ "serde", ] @@ -1491,32 +1631,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", -] - -[[package]] -name = "cargo-scaffold" -version = "0.14.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad9211604c79bf86afd55f798b3c105607f87bd08a9edbf71b22785b0d53f851" -dependencies = [ - "anyhow", - "auth-git2", - "clap", - "console", - "dialoguer", - "git2", - "globset", - "handlebars 5.1.2", - "indicatif", - "md5", - "serde", - "shell-words", - "toml", - "walkdir", + "tower 0.5.2", ] [[package]] @@ -1527,12 +1645,22 @@ checksum = "37b2a672a2cb129a2e41c10b1224bb368f9f37a2b16b612598138befd7b37eb5" [[package]] name = "cc" -version = "1.1.5" +version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "324c74f2155653c90b04f25b2a47a8a631360cb908f92a772695f430c7e31052" +checksum = "27f657647bcff5394bf56c7317665bbf790a137a50eaaa5c6bfbb9e27a518f2d" dependencies = [ "jobserver", "libc", + "shlex", +] + 
+[[package]] +name = "cexpr" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766" +dependencies = [ + "nom", ] [[package]] @@ -1541,11 +1669,17 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd" +[[package]] +name = "cfg_aliases" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724" + [[package]] name = "chrono" -version = "0.4.38" +version = "0.4.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a21f936df1771bf62b77f047b726c4625ff2e8aa607c01ec06e5a05bd8463401" +checksum = "7e36cc9d416881d2e24f9a963be5fb1cd90966419ac844274161d10488b3e825" dependencies = [ "android-tzdata", "iana-time-zone", @@ -1592,11 +1726,22 @@ dependencies = [ "half", ] +[[package]] +name = "clang-sys" +version = "1.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4" +dependencies = [ + "glob", + "libc", + "libloading", +] + [[package]] name = "clap" -version = "4.5.9" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "64acc1846d54c1fe936a78dc189c34e28d3f5afc348403f28ecf53660b9b8462" +checksum = "3135e7ec2ef7b10c6ed8950f0f792ed96ee093fa088608f1c76e569722700c84" dependencies = [ "clap_builder", "clap_derive", @@ -1604,9 +1749,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.9" +version = "4.5.23" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6fb8393d67ba2e7bfaf28a23458e4e2b543cc73a99595511eb207fdb8aede942" +checksum = "30582fc632330df2bd26877bde0c1f4470d57c582bbc070376afcd04d8cb4838" dependencies = [ "anstream", "anstyle", @@ -1616,9 
+1761,9 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.8" +version = "4.5.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bac35c6dafb060fd4d275d9a4ffae97917c13a6327903a8be2153cd964f7085" +checksum = "4ac6a0c7b1a9e9a5186361f67dfa1b88213572f427fb9ab038efb2bd8c582dab" dependencies = [ "heck 0.5.0", "proc-macro2", @@ -1628,27 +1773,33 @@ dependencies = [ [[package]] name = "clap_lex" -version = "0.7.1" +version = "0.7.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f46ad14479a25103f283c0f10005961cf086d8dc42205bb44c46ac563475dca6" + +[[package]] +name = "cmake" +version = "0.1.52" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b82cf0babdbd58558212896d1a4272303a57bdb245c2bf1147185fb45640e70" +checksum = "c682c223677e0e5b6b7f63a64b9351844c3f1b1678a68b7ee617e30fb082620e" +dependencies = [ + "cc", +] [[package]] name = "colorchoice" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b6a852b24ab71dffc585bcb46eaf7959d175cb865a7152e35b348d1b2960422" +checksum = "5b63caa9aa9397e2d9480a9b13673856c78d8ac123288526c37d7839f2a86990" [[package]] name = "combine" -version = "3.8.1" +version = "4.6.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da3da6baa321ec19e1cc41d31bf599f00c783d0517095cdaf0332e3fe8d20680" +checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd" dependencies = [ - "ascii", - "byteorder", - "either", + "bytes", "memchr", - "unreachable", ] [[package]] @@ -1726,8 +1877,8 @@ dependencies = [ "anyhow", "apollo-router", "async-trait", - "http 0.2.12", - "tower", + "http 1.2.0", + "tower 0.5.2", "tracing", ] @@ -1749,12 +1900,9 @@ dependencies = [ [[package]] name = "cookie-factory" -version = "0.3.3" +version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9885fa71e26b8ab7855e2ec7cae6e9b380edff76cd052e07c683a0319d51b3a2" -dependencies = [ - "futures", -] +checksum = "396de984970346b0d9e93d1415082923c679e5ae5c3ee3dcbd104f5610af126b" [[package]] name = "cookies-to-headers" @@ -1762,26 +1910,27 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] -name = "copy_dir" -version = "0.1.3" +name = "core-foundation" +version = "0.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "543d1dd138ef086e2ff05e3a48cf9da045da2033d16f8538fd76b86cd49b2ca3" +checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" dependencies = [ - "walkdir", + "core-foundation-sys", + "libc", ] [[package]] name = "core-foundation" -version = "0.9.4" +version = "0.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e195e091a93c46f7102ec7818a2aa394e1e1771c3ab4825963fa03e45afb8f" +checksum = "b55271e5c8c478ad3f38ad24ef34923091e0548492a266d19b3c0b4d82574c63" dependencies = [ "core-foundation-sys", "libc", @@ -1789,9 +1938,9 @@ dependencies = [ [[package]] name = "core-foundation-sys" -version = "0.8.6" +version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "06ea2b9bc92be3c2baa9334a323ebca2d6f074ff852cd1d7b11064035cd3868f" +checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" [[package]] name = "countme" @@ -1801,9 +1950,9 @@ checksum = "7704b5fdd17b18ae31c4c1da5a2e0305a2bf17b5249300a9ee9ed7b72114c636" [[package]] name = "cpufeatures" -version = "0.2.12" +version = "0.2.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "53fe5e26ff1b7aef8bca9c6080520cfb8d9333c7568e1829cef191a9723e5504" +checksum = "16b80225097f2e5ae4e7179dd2266824648f3e2f49d9134d584b76389d31c4c3" dependencies = [ "libc", ] @@ -1889,6 +2038,15 @@ dependencies = [ "crossbeam-utils", ] +[[package]] +name = 
"crossbeam-queue" +version = "0.3.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df0346b5d5e76ac2fe4e327c5fd1118d6be7c51dfb18f9b7922923f287471e35" +dependencies = [ + "crossbeam-utils", +] + [[package]] name = "crossbeam-utils" version = "0.8.20" @@ -2041,9 +2199,9 @@ dependencies = [ [[package]] name = "derive_arbitrary" -version = "1.3.2" +version = "1.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67e77553c4162a157adbf834ebae5b415acbecbeafc7a74b0e886657506a7611" +checksum = "30542c1ad912e0e3d22a1935c290e12e8a29d704a420177a31faad4a601a0800" dependencies = [ "proc-macro2", "quote", @@ -2073,25 +2231,12 @@ dependencies = [ "lazy_static", "mintex", "parking_lot", - "rustc-hash", + "rustc-hash 1.1.0", "serde", "serde_json", "thousands", ] -[[package]] -name = "dialoguer" -version = "0.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "658bce805d770f407bc62102fca7c2c64ceef2fbcb2b8bd19d2765ce093980de" -dependencies = [ - "console", - "shell-words", - "tempfile", - "thiserror", - "zeroize", -] - [[package]] name = "diff" version = "0.1.13" @@ -2110,18 +2255,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "dircmp" -version = "0.2.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3ca7fa3ba397980657070e679f412acddb7a372f1793ff68ef0bbe708680f0f" -dependencies = [ - "regex", - "sha2", - "thiserror", - "walkdir", -] - [[package]] name = "dirs" version = "5.0.1" @@ -2162,9 +2295,9 @@ checksum = "1435fa1053d8b2fbbe9be7e97eca7f33d37b28409959813daefc1446a14247f1" [[package]] name = "dunce" -version = "1.0.4" +version = "1.0.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" +checksum = "92773504d58c093f6de2459af4af33faa518c13451eb8f2b5698ed3d36e7c813" [[package]] name = "dw" @@ -2250,11 +2383,11 @@ dependencies = [ [[package]] name = 
"enum-as-inner" -version = "0.6.0" +version = "0.6.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ffccbb6966c05b32ef8fbac435df276c4ae4d3dc55a8cd0eb9745e6c12f546a" +checksum = "a1e6a265c649f3f5979b601d26f1d05ada116434c87741c9493cb56218f76cbc" dependencies = [ - "heck 0.4.1", + "heck 0.5.0", "proc-macro2", "quote", "syn 2.0.90", @@ -2310,12 +2443,12 @@ dependencies = [ [[package]] name = "errno" -version = "0.3.9" +version = "0.3.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "534c5cf6194dfab3db3242765c03bbe257cf92f22b38f6bc0c58d59108a820ba" +checksum = "33d852cb9b869c2a9b3df2f71a3074817f01e1844f839a144f5fcef059a4eb5d" dependencies = [ "libc", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -2330,17 +2463,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "event-listener" -version = "3.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93877bcde0eb80ca09131a08d23f0a5c18a620b01db137dba666d18cd9b30c2" -dependencies = [ - "concurrent-queue", - "parking", - "pin-project-lite", -] - [[package]] name = "event-listener" version = "5.3.1" @@ -2354,9 +2476,9 @@ dependencies = [ [[package]] name = "event-listener-strategy" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0f214dc438f977e6d4e3500aaa277f5ad94ca83fbbd9b1a15713ce2344ccc5a1" +checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2" dependencies = [ "event-listener 5.3.1", "pin-project-lite", @@ -2368,10 +2490,10 @@ version = "0.1.0" dependencies = [ "async-graphql", "async-graphql-axum", - "axum", + "axum 0.6.20", "env_logger", "tokio", - "tower", + "tower 0.4.13", ] [[package]] @@ -2381,15 +2503,17 @@ dependencies = [ "anyhow", "apollo-router", "async-trait", + "bytes", 
"futures", - "http 0.2.12", - "hyper", + "http 1.2.0", + "http-body-util", + "hyper 1.5.1", "multimap 0.9.1", "schemars", "serde", "serde_json", "tokio", - "tower", + "tower 0.5.2", "tracing", ] @@ -2423,9 +2547,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.1.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a" +checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be" [[package]] name = "ff" @@ -2439,27 +2563,27 @@ dependencies = [ [[package]] name = "filetime" -version = "0.2.23" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1ee447700ac8aa0b2f2bd7bc4462ad686ba06baa6727ac149a2d6277f0d240fd" +checksum = "35c0522e981e68cbfa8c3f978441a5f34b30b96e146b33cd3359176b50fe8586" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.4.1", - "windows-sys 0.52.0", + "libredox", + "windows-sys 0.59.0", ] [[package]] name = "fixedbitset" -version = "0.5.7" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d674e81391d1e1ab681a28d99df07927c6d4aa5b027d7da16ba32d1d21ecd99" +checksum = "0ce7134b9999ecaf8bcd65542e436736ef32ddca1b3e06094cb6ec5755203b80" [[package]] name = "flate2" -version = "1.0.30" +version = "1.0.35" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f54427cfd1c7829e2a139fcefea601bf088ebca651d2bf53ebc600eac295dae" +checksum = "c936bfdafb507ebbf50b8074c54fa31c5be9a1e7e5f467dd659697041407d07c" dependencies = [ "crc32fast", "miniz_oxide", @@ -2493,10 +2617,10 @@ dependencies = [ "anyhow", "apollo-router", "async-trait", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", "tracing", ] @@ -2506,10 +2630,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] 
[[package]] @@ -2566,34 +2690,51 @@ checksum = "6c2141d6d6c8512188a7891b4b01590a45f6dac67afb4f255c4124dbb86d4eaa" [[package]] name = "fred" -version = "7.1.2" +version = "9.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b99c2b48934cd02a81032dd7428b7ae831a27794275bc94eba367418db8a9e55" +checksum = "3cdd5378252ea124b712e0ac55147d26ae3af575883b34b8423091a4c719606b" dependencies = [ "arc-swap", "async-trait", "bytes", "bytes-utils", + "crossbeam-queue", "float-cmp", + "fred-macros", "futures", - "lazy_static", "log", "parking_lot", "rand 0.8.5", "redis-protocol", - "rustls", - "rustls-native-certs", - "rustls-webpki", + "rustls 0.23.19", + "rustls-native-certs 0.7.3", "semver", - "socket2 0.5.7", + "socket2", "tokio", - "tokio-rustls", + "tokio-rustls 0.26.1", "tokio-stream", "tokio-util", "url", "urlencoding", ] +[[package]] +name = "fred-macros" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1458c6e22d36d61507034d5afecc64f105c1d39712b7ac6ec3b352c423f715cc" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.90", +] + +[[package]] +name = "fs_extra" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "42703706b716c37f96a77aea830392ad231f44c9e9a67872fa5548707e11b11c" + [[package]] name = "fsio" version = "0.4.0" @@ -2605,9 +2746,9 @@ dependencies = [ [[package]] name = "futures" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "645c6916888f6cb6350d2550b80fb63e734897a8498abe35cfb732b6487804b0" +checksum = "65bc07b1a8bc7c85c5f2e110c476c7389b4554ba72af57d8445ea63a576b0876" dependencies = [ "futures-channel", "futures-core", @@ -2620,9 +2761,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eac8f7d7865dcb88bd4373ab671c8cf4508703796caa2b1985a9ca867b3fcb78" +checksum = "2dff15bf788c671c1934e366d07e30c1814a8ef514e1af724a602e8a2fbe1b10" dependencies = [ "futures-core", "futures-sink", @@ -2630,15 +2771,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dfc6580bb841c5a68e9ef15c77ccc837b40a7504914d52e47b8b0e9bbda25a1d" +checksum = "05f29059c0c2090612e8d742178b0580d2dc940c837851ad723096f87af6663e" [[package]] name = "futures-executor" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a576fc72ae164fca6b9db127eaa9a9dda0d61316034f33a0a0d4eda41f02b01d" +checksum = "1e28d1d997f585e54aebc3f97d39e72338912123a67330d723fdbb564d646c9f" dependencies = [ "futures-core", "futures-task", @@ -2648,9 +2789,9 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a44623e20b9681a318efdd71c299b6b222ed6f231972bfe2f224ebad6311f0c1" +checksum = "9e5c1b78ca4aae1ac06c48a526a655760685149f0d465d21f37abfe57ce075c6" [[package]] name = "futures-lite" @@ -2669,11 +2810,11 @@ dependencies = [ [[package]] name = "futures-lite" -version = "2.3.0" +version = "2.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "52527eb5074e35e9339c6b4e8d12600c7128b68fb25dcb9fa9dec18f7c25f3a5" +checksum = "cef40d21ae2c515b51041df9ed313ed21e572df340ea58a922a0aefe7e8891a1" dependencies = [ - "fastrand 2.1.0", + "fastrand 2.3.0", "futures-core", "futures-io", "parking", @@ -2682,9 +2823,9 @@ dependencies = [ [[package]] name = "futures-macro" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87750cf4b7a4c0625b1529e4c543c2182106e4dedc60a2a6455e00d212c489ac" +checksum = 
"162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", @@ -2693,21 +2834,21 @@ dependencies = [ [[package]] name = "futures-sink" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9fb8e00e87438d937621c1c6269e53f536c14d3fbd6a042bb24879e57d474fb5" +checksum = "e575fab7d1e0dcb8d0c7bcf9a63ee213816ab51902e6d244a95819acacf1d4f7" [[package]] name = "futures-task" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38d84fa142264698cdce1a9f9172cf383a0c82de1bddcf3092901442c4097004" +checksum = "f90f7dce0722e95104fcb095585910c0977252f286e354b5e3bd38902cd99988" [[package]] name = "futures-test" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce388237b32ac42eca0df1ba55ed3bbda4eaf005d7d4b5dbc0b20ab962928ac9" +checksum = "5961fb6311645f46e2cdc2964a8bfae6743fd72315eaec181a71ae3eb2467113" dependencies = [ "futures-core", "futures-executor", @@ -2717,7 +2858,6 @@ dependencies = [ "futures-task", "futures-util", "pin-project", - "pin-utils", ] [[package]] @@ -2728,9 +2868,9 @@ checksum = "f288b0a4f20f9a56b5d1da57e2227c661b7b16168e2f72365f57b63326e29b24" [[package]] name = "futures-util" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d6401deb83407ab3da39eba7e33987a73c3df0c82b4bb5813ee871c19c41d48" +checksum = "9fa08315bb612088cc391249efdc3bc77536f16c91f6cf495e6fbe85b20a4a81" dependencies = [ "futures-channel", "futures-core", @@ -2792,24 +2932,9 @@ dependencies = [ [[package]] name = "gimli" -version = "0.29.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "40ecd4077b5ae9fd2e9e169b102c6c330d0605168eb0e8bf79952b256dbefffd" - -[[package]] -name = "git2" -version = "0.18.3" +version = "0.31.1" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "232e6a7bfe35766bf715e55a88b39a700596c0ccfd88cd3680b4cdb40d66ef70" -dependencies = [ - "bitflags 2.6.0", - "libc", - "libgit2-sys", - "log", - "openssl-probe", - "openssl-sys", - "url", -] +checksum = "07e28edb80900c19c28f1072f2e8aeca7fa06b23cd4169cefe1af5aa3260783f" [[package]] name = "glob" @@ -2819,22 +2944,22 @@ checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "globset" -version = "0.4.14" +version = "0.4.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "57da3b9b5b85bd66f31093f8c408b90a74431672542466497dcbdfdc02034be1" +checksum = "15f1ce686646e7f1e19bf7d5533fe443a45dbfb990e00629110797578b42fb19" dependencies = [ "aho-corasick", "bstr", "log", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] [[package]] name = "gloo-timers" -version = "0.2.6" +version = "0.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b995a66bb87bebce9a0f4a95aed01daca4872c050bfcb21653361c03bc35e5c" +checksum = "bbb143cf96099802033e0d4f4963b19fd2e0b728bcf076cd9cf7f6634f092994" dependencies = [ "futures-channel", "futures-core", @@ -2853,12 +2978,12 @@ dependencies = [ [[package]] name = "graphql-parser" -version = "0.4.0" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2ebc8013b4426d5b81a4364c419a95ed0b404af2b82e2457de52d9348f0e474" +checksum = "7a818c0d883d7c0801df27be910917750932be279c7bc82dc541b8769425f409" dependencies = [ "combine", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2923,7 +3048,26 @@ dependencies = [ "futures-sink", "futures-util", "http 0.2.12", - "indexmap 2.5.0", + "indexmap 2.7.0", + "slab", + "tokio", + "tokio-util", + "tracing", +] + +[[package]] +name = "h2" +version = "0.4.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"ccae279728d634d083c00f6099cb58f01cc99c145b84b8be2f6c74618d79922e" +dependencies = [ + "atomic-waker", + "bytes", + "fnv", + "futures-core", + "futures-sink", + "http 1.2.0", + "indexmap 2.7.0", "slab", "tokio", "tokio-util", @@ -2951,21 +3095,7 @@ dependencies = [ "pest_derive", "serde", "serde_json", - "thiserror", -] - -[[package]] -name = "handlebars" -version = "5.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d08485b96a0e6393e9e4d1b8d48cf74ad6c063cd905eb33f42c1ce3f0377539b" -dependencies = [ - "log", - "pest", - "pest_derive", - "serde", - "serde_json", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -2979,10 +3109,6 @@ name = "hashbrown" version = "0.14.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e5274423e17b7c9fc20b6e7e208532f9b19825d82dfd615708b70edd83df41f1" -dependencies = [ - "ahash", - "allocator-api2", -] [[package]] name = "hashbrown" @@ -3013,13 +3139,28 @@ checksum = "06683b93020a07e3dbcf5f8c0f6d40080d725bea7936fc01ad345c01b97dc270" dependencies = [ "base64 0.21.7", "bytes", - "headers-core", + "headers-core 0.2.0", "http 0.2.12", "httpdate", "mime", "sha1", ] +[[package]] +name = "headers" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "322106e6bd0cba2d5ead589ddb8150a13d7c4217cf80d7c4f682ca994ccc6aa9" +dependencies = [ + "base64 0.21.7", + "bytes", + "headers-core 0.3.0", + "http 1.2.0", + "httpdate", + "mime", + "sha1", +] + [[package]] name = "headers-core" version = "0.2.0" @@ -3029,6 +3170,15 @@ dependencies = [ "http 0.2.12", ] +[[package]] +name = "headers-core" +version = "0.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "54b4a22553d4242c49fddb9ba998a99962b5cc6f22cb5a3482bec22522403ce4" +dependencies = [ + "http 1.2.0", +] + [[package]] name = "heck" version = "0.4.1" @@ -3052,7 +3202,7 @@ dependencies = [ "serde", "serde_json", "tokio", - "tower", + "tower 0.5.2", "tracing", ] 
@@ -3094,7 +3244,7 @@ dependencies = [ "ipnet", "once_cell", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", "tinyvec", "tokio", "tracing", @@ -3117,7 +3267,7 @@ dependencies = [ "rand 0.8.5", "resolv-conf", "smallvec", - "thiserror", + "thiserror 1.0.69", "tokio", "tracing", ] @@ -3164,9 +3314,9 @@ dependencies = [ [[package]] name = "http" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "21b9ddb458710bc376481b842f5da65cdf31522de232c1ca8146abce2a358258" +checksum = "f16ca2af56261c99fba8bac40a10251ce8188205a4c448fbb745a2e4daa76fea" dependencies = [ "bytes", "fnv", @@ -3191,7 +3341,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1efedce1fb8e6913f23e0c92de8e62cd5b772a67e7b3946df930a62566c93184" dependencies = [ "bytes", - "http 1.1.0", + "http 1.2.0", ] [[package]] @@ -3202,16 +3352,16 @@ checksum = "793429d76616a256bcb62c2a2ec2bed781c8307e797e2598c50010f2bee2544f" dependencies = [ "bytes", "futures-util", - "http 1.1.0", + "http 1.2.0", "http-body 1.0.1", "pin-project-lite", ] [[package]] name = "http-range-header" -version = "0.3.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "add0ab9360ddbd88cfeb3bd9574a1d85cfdfa14db10b3e21d3700dbc4328758f" +checksum = "9171a2ea8a68358193d15dd5d70c1c10a2afc3e7e4c5bc92bc9f025cebd7359c" [[package]] name = "http-serde" @@ -3223,6 +3373,16 @@ dependencies = [ "serde", ] +[[package]] +name = "http-serde" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f056c8559e3757392c8d091e796416e4649d8e49e88b8d76df6c002f05027fd" +dependencies = [ + "http 1.2.0", + "serde", +] + [[package]] name = "http-types" version = "2.12.0" @@ -3246,9 +3406,9 @@ dependencies = [ [[package]] name = "httparse" -version = "1.9.4" +version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"0fcc0b4a115bf80b728eb8ea024ad5bd707b615bfed49e0665b6e0f86fd082d9" +checksum = "7d71d3574edd2771538b901e6549113b4006ece66150fb69c0fb6d9a2adae946" [[package]] name = "httpdate" @@ -3275,27 +3435,48 @@ dependencies = [ [[package]] name = "hyper" version = "0.14.31" -source = "git+https://github.com/apollographql/hyper.git?tag=header-customizations-20241108#c42aec785394b40645a283384838b856beace011" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8c08302e8fa335b151b788c775ff56e7a03ae64ff85c548ee820fecb70356e85" dependencies = [ "bytes", "futures-channel", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", "httparse", "httpdate", "itoa", "pin-project-lite", - "smallvec", - "socket2 0.5.7", + "socket2", "tokio", "tower-service", "tracing", "want", ] +[[package]] +name = "hyper" +version = "1.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97818827ef4f364230e16705d4706e2897df2bb60617d6ca15d598025a3c481f" +dependencies = [ + "bytes", + "futures-channel", + "futures-util", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", + "httparse", + "httpdate", + "itoa", + "pin-project-lite", + "smallvec", + "tokio", + "want", +] + [[package]] name = "hyper-rustls" version = "0.24.2" @@ -3304,37 +3485,91 @@ checksum = "ec3efd23720e2049821a693cbc7e65ea87c72f1c58ff2f9522ff332b1491e590" dependencies = [ "futures-util", "http 0.2.12", - "hyper", + "hyper 0.14.31", "log", - "rustls", - "rustls-native-certs", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", ] [[package]] -name = "hyper-timeout" -version = "0.4.1" +name = "hyper-rustls" +version = "0.27.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +checksum = "08afdbb5c31130e3034af566421053ab03787c640246a446327f550d11bcb333" dependencies = [ - "hyper", - "pin-project-lite", + 
"futures-util", + "http 1.2.0", + "hyper 1.5.1", + "hyper-util", + "log", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", + "rustls-pki-types", "tokio", - "tokio-io-timeout", + "tokio-rustls 0.26.1", + "tower-service", + "webpki-roots 0.26.7", ] [[package]] -name = "hyperlocal" -version = "0.8.0" +name = "hyper-timeout" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bbb958482e8c7be4bc3cf272a766a2b0bf1a6755e7a6ae777f017a31d11b13b1" +dependencies = [ + "hyper 0.14.31", + "pin-project-lite", + "tokio", + "tokio-io-timeout", +] + +[[package]] +name = "hyper-timeout" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fafdf7b2b2de7c9784f76e02c0935e65a8117ec3b768644379983ab333ac98c" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" dependencies = [ + "hyper 1.5.1", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + +[[package]] +name = "hyper-util" +version = "0.1.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "df2dcfbe0677734ab2f3ffa7fa7bfd4706bfdc1ef393f2ee30184aed67e631b4" +dependencies = [ + "bytes", + "futures-channel", "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "hyper 1.5.1", + "pin-project-lite", + "socket2", + "tokio", + "tower-service", + "tracing", +] + +[[package]] +name = "hyperlocal" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "986c5ce3b994526b3cd75578e62554abd09f0899d6206de48b3e96ab34ccc8c7" +dependencies = [ "hex", - "hyper", - "pin-project", + "http-body-util", + "hyper 1.5.1", + "hyper-util", + "pin-project-lite", "tokio", + "tower-service", ] [[package]] @@ -3527,28 +3762,15 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.5.0" +version = "2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5" 
+checksum = "62f822373a4fe84d4bb149bf54e584a7f4abec90e072ed49cda0edea5b95471f" dependencies = [ "equivalent", - "hashbrown 0.14.5", + "hashbrown 0.15.2", "serde", ] -[[package]] -name = "indicatif" -version = "0.17.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "763a5a8f45087d6bcea4222e7b72c291a054edf80e4ef6efd2a4979878c7bea3" -dependencies = [ - "console", - "instant", - "number_prefix", - "portable-atomic", - "unicode-width", -] - [[package]] name = "infer" version = "0.2.3" @@ -3577,17 +3799,19 @@ dependencies = [ [[package]] name = "insta" -version = "1.39.0" +version = "1.41.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "810ae6042d48e2c9e9215043563a58a80b877bc863228a74cf10c49d4620a6f5" +checksum = "7e9ffc4d4892617c50a928c52b2961cb5174b6fc6ebf252b2fac9d21955c48b8" dependencies = [ "console", + "globset", "lazy_static", "linked-hash-map", "pest", "pest_derive", "serde", "similar", + "walkdir", ] [[package]] @@ -3615,24 +3839,13 @@ dependencies = [ "ghost", ] -[[package]] -name = "io-lifetimes" -version = "1.0.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eae7b9aee968036d54dce06cebaefd919e4472e753296daccd6d344e3e2df0c2" -dependencies = [ - "hermit-abi 0.3.9", - "libc", - "windows-sys 0.48.0", -] - [[package]] name = "ipconfig" version = "0.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "b58db92f96b720de98181bbbe63c831e87005ab460c1bf306eb2622b4707997f" dependencies = [ - "socket2 0.5.7", + "socket2", "widestring", "windows-sys 0.48.0", "winreg", @@ -3640,26 +3853,36 @@ dependencies = [ [[package]] name = "ipnet" -version = "2.9.0" +version = "2.10.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddc24109865250148c2e0f3d25d4f0f479571723792d3802153c60922a4fb708" + +[[package]] +name = "iri-string" +version = "0.7.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"8f518f335dce6725a761382244631d86cf0ccb2863413590b31338feb467f9c3" +checksum = "dc0f0a572e8ffe56e2ff4f769f32ffe919282c3916799f8b68688b6030063bea" +dependencies = [ + "memchr", + "serde", +] [[package]] name = "is-terminal" -version = "0.4.12" +version = "0.4.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f23ff5ef2b80d608d61efee834934d862cd92461afc0560dedf493e4c033738b" +checksum = "261f68e344040fbd0edea105bef17c66edf46f984ddb1115b775ce31be948f4b" dependencies = [ - "hermit-abi 0.3.9", + "hermit-abi 0.4.0", "libc", "windows-sys 0.52.0", ] [[package]] name = "is_terminal_polyfill" -version = "1.70.0" +version = "1.70.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8478577c03552c21db0e2724ffb8986a5ce7af88107e6be5d2ee6e158c12800" +checksum = "7943c866cc5cd64cbc25b2e01621d07fa8eb2a1a23160ee81ce38704e97b8ecf" [[package]] name = "iso8601" @@ -3699,25 +3922,26 @@ dependencies = [ [[package]] name = "itoa" -version = "1.0.11" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49f1f14873335454500d59611f1cf4a4b0f786f9ac11f4312a78e4cf2566695b" +checksum = "d75a2a4b1b190afb6f5425f10f6a8f959d2ea0b9c2b1d79553551850539e4674" [[package]] name = "jobserver" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2b099aaa34a9751c5bf0878add70444e1ed2dd73f347be99003d4577277de6e" +checksum = "48d1dbcbbeb6a7fec7e059840aa538bd62aaccf972c7346c4d9d2059312853d0" dependencies = [ "libc", ] [[package]] name = "js-sys" -version = "0.3.69" +version = "0.3.76" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "29c15563dc2726973df627357ce0c9ddddbea194836909d655df6a75d2cf296d" +checksum = "6717b6b5b077764fb5966237269cb3c64edddde4b14ce42647430a78ced9e7b7" dependencies = [ + "once_cell", "wasm-bindgen", ] @@ -3731,7 +3955,7 @@ dependencies = [ "pest_derive", "regex", "serde_json", - "thiserror", + 
"thiserror 1.0.69", ] [[package]] @@ -3794,10 +4018,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -3835,11 +4059,17 @@ version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bbd2bcb4c963f2ddae06a2efc7e9f3591312473c50c6685e1f298068316e66fe" +[[package]] +name = "lazycell" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55" + [[package]] name = "libc" -version = "0.2.167" +version = "0.2.168" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "09d6582e104315a817dff97f75133544b2e094ee22447d2acf4a74e189ba06fc" +checksum = "5aaeb2981e0606ca11d79718f8bb01164f1d6ed75080182d3abf017e6d244b6d" [[package]] name = "libfuzzer-sys" @@ -3853,17 +4083,13 @@ dependencies = [ ] [[package]] -name = "libgit2-sys" -version = "0.16.2+1.7.2" +name = "libloading" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ee4126d8b4ee5c9d9ea891dd875cfdc1e9d0950437179104b183d7d8a74d24e8" +checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34" dependencies = [ - "cc", - "libc", - "libssh2-sys", - "libz-sys", - "openssl-sys", - "pkg-config", + "cfg-if", + "windows-targets 0.52.6", ] [[package]] @@ -3874,20 +4100,7 @@ checksum = "c0ff37bd590ca25063e35af745c343cb7a0271906fb7b37e4813e8f79f00268d" dependencies = [ "bitflags 2.6.0", "libc", -] - -[[package]] -name = "libssh2-sys" -version = "0.3.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2dc8a030b787e2119a731f1951d6a773e2280c660f8ec4b0f5e1505a386e71ee" -dependencies = [ - "cc", - "libc", - "libz-sys", - "openssl-sys", - "pkg-config", - "vcpkg", + "redox_syscall", ] [[package]] @@ -3903,16 +4116,10 @@ dependencies = [ ] [[package]] -name = "libz-sys" 
-version = "1.1.18" +name = "line-col" +version = "0.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c15da26e5af7e25c90b37a2d75cdbf940cf4a55316de9d84c679c9b8bfabf82e" -dependencies = [ - "cc", - "libc", - "pkg-config", - "vcpkg", -] +checksum = "9e69cdf6b85b5c8dce514f694089a2cf8b1a702f6cd28607bcb3cf296c9778db" [[package]] name = "linked-hash-map" @@ -3922,30 +4129,24 @@ checksum = "0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" [[package]] name = "linkme" -version = "0.3.27" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccb76662d78edc9f9bf56360d6919bdacc8b7761227727e5082f128eeb90bbf5" +checksum = "566336154b9e58a4f055f6dd4cbab62c7dc0826ce3c0a04e63b2d2ecd784cdae" dependencies = [ "linkme-impl", ] [[package]] name = "linkme-impl" -version = "0.3.27" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f8dccda732e04fa3baf2e17cf835bfe2601c7c2edafd64417c627dabae3a8cda" +checksum = "edbe595006d355eaf9ae11db92707d4338cd2384d16866131cc1afdbdd35d8d9" dependencies = [ "proc-macro2", "quote", "syn 2.0.90", ] -[[package]] -name = "linux-raw-sys" -version = "0.3.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ef53942eb7bf7ff43a617b3e2c1c4a5ecf5944a7c1bc12d7ee39bbb15e5c1519" - [[package]] name = "linux-raw-sys" version = "0.4.14" @@ -3980,11 +4181,11 @@ dependencies = [ [[package]] name = "lru" -version = "0.12.3" +version = "0.12.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d3262e75e648fce39813cb56ac41f3c3e3f65217ebf3844d818d1f9398cfb0dc" +checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38" dependencies = [ - "hashbrown 0.14.5", + "hashbrown 0.15.2", ] [[package]] @@ -4024,10 +4225,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94" 
[[package]] -name = "md5" -version = "0.7.0" +name = "matchit" +version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771" +checksum = "47e1ffaa40ddd1f3ed91f717a33c8c0ee23fff369e3aa8772b9605cc1d22f4c3" [[package]] name = "mediatype" @@ -4075,11 +4276,11 @@ checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a" [[package]] name = "miniz_oxide" -version = "0.7.4" +version = "0.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b8a240ddb74feaf34a79a7add65a741f3167852fba007066dcac1ca548d89c08" +checksum = "e2d80299ef12ff69b16a84bb182e3b9df68b5a91574d3d4fa6e41b65deec4df1" dependencies = [ - "adler", + "adler2", ] [[package]] @@ -4100,11 +4301,22 @@ dependencies = [ "windows-sys 0.48.0", ] +[[package]] +name = "mio" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2886843bf800fba2e3377cff24abf6379b4c4d5c6681eaf9ea5b0d15090450bd" +dependencies = [ + "libc", + "wasi 0.11.0+wasi-snapshot-preview1", + "windows-sys 0.52.0", +] + [[package]] name = "mockall" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d4c28b3fb6d753d28c20e826cd46ee611fda1cf3cde03a443a974043247c065a" +checksum = "39a6bfcc6c8c7eed5ee98b9c3e33adc726054389233e201c95dab2d41a3839d2" dependencies = [ "cfg-if", "downcast", @@ -4116,9 +4328,9 @@ dependencies = [ [[package]] name = "mockall_derive" -version = "0.13.0" +version = "0.13.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "341014e7f530314e9a1fdbc7400b244efea7122662c96bfa248c31da5bfb2020" +checksum = "25ca3004c2efe9011bd4e461bd8256445052b9615405b4f7ea43fc8ca5c20898" dependencies = [ "cfg-if", "proc-macro2", @@ -4140,15 +4352,26 @@ dependencies = [ "log", "memchr", "mime", - "spin", + "spin 0.9.8", "version_check", ] [[package]] -name = "multimap" 
-version = "0.8.3" +name = "multer" +version = "3.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5ce46fe64a9d73be07dcbe690a38ce1b293be448fd8ce1e6c1b8062c9f72c6a" +checksum = "83e87776546dc87511aa5ee218730c92b666d7264ab6ed41f9d215af9cd5224b" +dependencies = [ + "bytes", + "encoding_rs", + "futures-util", + "http 1.2.0", + "httparse", + "memchr", + "mime", + "spin 0.9.8", + "version_check", +] [[package]] name = "multimap" @@ -4168,6 +4391,15 @@ dependencies = [ "serde", ] +[[package]] +name = "no-std-compat" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b93853da6d84c2e3c7d730d6473e8817692dd89be387eb01b94d7f108ecb5b8c" +dependencies = [ + "spin 0.5.2", +] + [[package]] name = "nom" version = "7.1.3" @@ -4178,6 +4410,17 @@ dependencies = [ "minimal-lexical", ] +[[package]] +name = "nom_locate" +version = "4.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1e3c83c053b0713da60c5b8de47fe8e494fe3ece5267b2f23090a07a053ba8f3" +dependencies = [ + "bytecount", + "memchr", + "nom", +] + [[package]] name = "notify" version = "6.1.1" @@ -4190,7 +4433,7 @@ dependencies = [ "kqueue", "libc", "log", - "mio", + "mio 0.8.11", "walkdir", "windows-sys 0.48.0", ] @@ -4327,26 +4570,23 @@ dependencies = [ "libc", ] -[[package]] -name = "number_prefix" -version = "0.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "830b246a0e5f20af87141b25c173cd1b609bd7779a4617d6ec582abaf90870f3" - [[package]] name = "object" -version = "0.36.1" +version = "0.36.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "081b846d1d56ddfc18fdf1a922e4f6e07a11768ea1b92dec44e42b72712ccfce" +checksum = "aedf0a2d09c573ed1d8d85b30c119153926a2b36dce0ab28322c09a117a4683e" dependencies = [ "memchr", ] [[package]] name = "once_cell" -version = "1.19.0" +version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "3fdb12b2476b595f9358c5161aa467c2438859caa136dec86c26fdd2efe17b92" +checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +dependencies = [ + "portable-atomic", +] [[package]] name = "oorandom" @@ -4360,10 +4600,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -4372,28 +4612,6 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" -[[package]] -name = "openssl-src" -version = "300.3.1+3.3.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7259953d42a81bf137fbbd73bd30a8e1914d6dce43c2b90ed575783a22608b91" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.102" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c597637d56fbc83893a35eb0dd04b2b8e7a50c91e64e9493e398b5df4fb45fa2" -dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - [[package]] name = "opentelemetry" version = "0.20.0" @@ -4415,7 +4633,7 @@ dependencies = [ "js-sys", "once_cell", "pin-project-lite", - "thiserror", + "thiserror 1.0.69", "urlencoding", ] @@ -4443,9 +4661,9 @@ dependencies = [ "opentelemetry 0.20.0", "opentelemetry-http", "opentelemetry-semantic-conventions", - "reqwest", + "reqwest 0.11.27", "rmp", - "thiserror", + "thiserror 1.0.69", "url", ] @@ -4459,7 +4677,7 @@ dependencies = [ "bytes", "http 0.2.12", "opentelemetry_api", - "reqwest", + "reqwest 0.11.27", ] [[package]] @@ -4471,12 +4689,12 @@ dependencies = [ "async-trait", "futures-core", "futures-util", - "headers", + "headers 0.3.9", "http 0.2.12", "opentelemetry 0.20.0", "opentelemetry-http", "opentelemetry-semantic-conventions", - "reqwest", + "reqwest 0.11.27", "thrift", "tokio", ] @@ -4496,8 +4714,8 @@ dependencies = [ "opentelemetry_api", 
"opentelemetry_sdk 0.20.0", "prost 0.11.9", - "reqwest", - "thiserror", + "reqwest 0.11.27", + "thiserror 1.0.69", "tokio", "tonic 0.9.2", ] @@ -4577,10 +4795,10 @@ dependencies = [ "opentelemetry 0.20.0", "opentelemetry-http", "opentelemetry-semantic-conventions", - "reqwest", + "reqwest 0.11.27", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "typed-builder", ] @@ -4596,7 +4814,7 @@ dependencies = [ "js-sys", "once_cell", "pin-project-lite", - "thiserror", + "thiserror 1.0.69", "urlencoding", ] @@ -4619,7 +4837,7 @@ dependencies = [ "rand 0.8.5", "regex", "serde_json", - "thiserror", + "thiserror 1.0.69", "tokio", "tokio-stream", ] @@ -4638,10 +4856,10 @@ dependencies = [ "glob", "once_cell", "opentelemetry 0.22.0", - "ordered-float 4.2.1", + "ordered-float 4.5.0", "percent-encoding", "rand 0.8.5", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -4670,9 +4888,9 @@ dependencies = [ [[package]] name = "ordered-float" -version = "4.2.1" +version = "4.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "19ff2cf528c6c03d9ed653d6c4ce1dc0582dc4af309790ad92f07c1cd551b0be" +checksum = "c65ee1f9701bf938026630b455d5315f490640234259037edb259798b3bcf85e" dependencies = [ "num-traits", ] @@ -4703,9 +4921,9 @@ dependencies = [ [[package]] name = "parking" -version = "2.2.0" +version = "2.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb813b8af86854136c6922af0598d719255ecb2179515e6e7730d468f05c9cae" +checksum = "f38d5652c16fde515bb1ecef450ab0f6a219d619a7274976324d5e377f7dceba" [[package]] name = "parking_lot" @@ -4725,7 +4943,7 @@ checksum = "1e401f977ab385c9e4e3ab30627d6f26d00e2c73eef317493c4ec6d468726cf8" dependencies = [ "cfg-if", "libc", - "redox_syscall 0.5.3", + "redox_syscall", "smallvec", "windows-targets 0.52.6", ] @@ -4763,20 +4981,20 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" [[package]] name = "pest" -version = "2.7.11" +version = "2.7.15" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd53dff83f26735fdc1ca837098ccf133605d794cdae66acfc2bfac3ec809d95" +checksum = "8b7cafe60d6cf8e62e1b9b2ea516a089c008945bb5a275416789e7db0bc199dc" dependencies = [ "memchr", - "thiserror", + "thiserror 2.0.10", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a548d2beca6773b1c244554d36fcf8548a8a58e74156968211567250e48e49a" +checksum = "816518421cfc6887a0d62bf441b6ffb4536fcc926395a69e1a85852d4363f57e" dependencies = [ "pest", "pest_generator", @@ -4784,9 +5002,9 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c93a82e8d145725dcbaf44e5ea887c8a869efdcc28706df2d08c69e17077183" +checksum = "7d1396fd3a870fc7838768d171b4616d5c91f6cc25e377b673d714567d99377b" dependencies = [ "pest", "pest_meta", @@ -4797,9 +5015,9 @@ dependencies = [ [[package]] name = "pest_meta" -version = "2.7.11" +version = "2.7.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a941429fea7e08bedec25e4f6785b6ffaacc6b755da98df5ef3e7dcf4a124c4f" +checksum = "e1e58089ea25d717bfd31fb534e4f3afcc2cc569c70de3e239778991ea3b7dea" dependencies = [ "once_cell", "pest", @@ -4808,30 +5026,30 @@ dependencies = [ [[package]] name = "petgraph" -version = "0.6.6" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c94eb96835f05ec51384814c9b2daef83f68486f67a0e2e9680e0f698dca808e" +checksum = "b4c5cc86750666a3ed20bdaf5ca2a0344f9c67674cae0515bec2da16fbaa47db" dependencies = [ "fixedbitset", - "indexmap 2.5.0", + "indexmap 2.7.0", "serde", "serde_derive", ] [[package]] name = "pin-project" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b6bf43b791c5b9e34c3d182969b4abb522f9343702850a2e57f460d00d09b4b3" +checksum = "be57f64e946e500c8ee36ef6331845d40a93055567ec57e8fae13efd33759b95" dependencies = [ "pin-project-internal", ] [[package]] name = "pin-project-internal" -version = "1.1.5" +version = "1.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f38a4412a78282e09a2cf38d195ea5420d15ba0602cb375210efbc877243965" +checksum = "3c0f5fad0874fc7abcd4d750e76917eaebbecaa2c20bde22e1dbeeba8beb758c" dependencies = [ "proc-macro2", "quote", @@ -4840,9 +5058,9 @@ dependencies = [ [[package]] name = "pin-project-lite" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bda66fc9667c18cb2758a2ac84d1167245054bcf85d5d1aaa6923f45801bdd02" +checksum = "915a1e146535de9163f3987b8944ed8cf49a18bb0056bcebcdcece385cece4ff" [[package]] name = "pin-utils" @@ -4852,12 +5070,12 @@ checksum = "8b870d8c151b6f2fb93e84a13146138f05d02ed11c7e7c54f8826aaaf7c9f184" [[package]] name = "piper" -version = "0.2.3" +version = "0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ae1d5c74c9876f070d3e8fd503d748c7d974c3e48da8f41350fa5222ef9b4391" +checksum = "96c8c490f422ef9a4efd2cb5b42b76c8613d7e7dfc1caf667b8a3350a5acc066" dependencies = [ "atomic-waker", - "fastrand 2.1.0", + "fastrand 2.3.0", "futures-io", ] @@ -4873,15 +5091,15 @@ dependencies = [ [[package]] name = "pkg-config" -version = "0.3.30" +version = "0.3.31" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d231b230927b5e4ad203db57bbcbee2802f6bce620b1e4a9024a07d94e2907ec" +checksum = "953ec861398dccce10c670dfeaf3ec4911ca479e9c02154b3a215178c5f566f2" [[package]] name = "plotters" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a15b6eccb8484002195a3e44fe65a4ce8e93a625797a063735536fd59cb01cf3" +checksum = 
"5aeb6f403d7a4911efb1e33402027fc44f29b5bf6def3effcc22d7bb75f2b747" dependencies = [ "num-traits", "plotters-backend", @@ -4892,55 +5110,39 @@ dependencies = [ [[package]] name = "plotters-backend" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "414cec62c6634ae900ea1c56128dfe87cf63e7caece0852ec76aba307cebadb7" +checksum = "df42e13c12958a16b3f7f4386b9ab1f3e7933914ecea48da7139435263a4172a" [[package]] name = "plotters-svg" -version = "0.3.6" +version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "81b30686a7d9c3e010b84284bdd26a29f2138574f52f5eb6f794fc0ad924e705" +checksum = "51bae2ac328883f7acdfea3d66a7c35751187f870bc81f94563733a154d7a670" dependencies = [ "plotters-backend", ] [[package]] name = "polling" -version = "2.8.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b2d323e8ca7996b3e23126511a523f7e62924d93ecd5ae73b333815b0eb3dce" -dependencies = [ - "autocfg", - "bitflags 1.3.2", - "cfg-if", - "concurrent-queue", - "libc", - "log", - "pin-project-lite", - "windows-sys 0.48.0", -] - -[[package]] -name = "polling" -version = "3.7.2" +version = "3.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a3ed00ed3fbf728b5816498ecd316d1716eecaced9c0c8d2c5a6740ca214985b" +checksum = "a604568c3202727d1507653cb121dbd627a58684eb09a820fd746bee38b4442f" dependencies = [ "cfg-if", "concurrent-queue", "hermit-abi 0.4.0", "pin-project-lite", - "rustix 0.38.34", + "rustix", "tracing", - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] name = "portable-atomic" -version = "1.6.0" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7170ef9988bc169ba16dd36a7fa041e5c4cbeb6a35b76d4c03daded371eae7c0" +checksum = "280dc24453071f1b63954171985a0b0d30058d287960968b9b2aca264c8d4ee6" [[package]] name = "powerfmt" @@ -4950,9 +5152,12 @@ checksum = 
"439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" [[package]] name = "ppv-lite86" -version = "0.2.17" +version = "0.2.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" +checksum = "77957b295656769bb8ad2b6a6b09d897d94f05c41b069aede1fcdaa675eaea04" +dependencies = [ + "zerocopy", +] [[package]] name = "predicates" @@ -4966,28 +5171,38 @@ dependencies = [ [[package]] name = "predicates-core" -version = "1.0.6" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" +checksum = "ae8177bee8e75d6846599c6b9ff679ed51e882816914eec639944d7c9aa11931" [[package]] name = "predicates-tree" -version = "1.0.9" +version = "1.0.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +checksum = "41b740d195ed3166cd147c8047ec98db0e22ec019eb8eeb76d343b795304fb13" dependencies = [ "predicates-core", "termtree", ] +[[package]] +name = "pretty_assertions" +version = "1.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3ae130e2f271fbc2ac3a40fb1d07180839cdbbe443c7a27e1e3c13c5cac0116d" +dependencies = [ + "diff", + "yansi", +] + [[package]] name = "prettyplease" -version = "0.1.25" +version = "0.2.25" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8646e95016a7a6c4adea95bafa8a16baab64b583356217f2c85db4a39d9a86" +checksum = "64d1ec885c64d0457d564db4ec299b2dae3f9c02808b8ad9c3a089c591b18033" dependencies = [ "proc-macro2", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -5009,6 +5224,15 @@ dependencies = [ "toml_edit 0.19.15", ] +[[package]] +name = "proc-macro-crate" +version = "3.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8ecf48c7ca261d60b74ab1a7b20da18bede46776b2e55535cb958eb595c5fa7b" +dependencies = [ + "toml_edit 0.22.22", +] + [[package]] name = "proc-macro2" version = "1.0.92" @@ -5030,7 +5254,7 @@ dependencies = [ "memchr", "parking_lot", "protobuf", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -5040,12 +5264,12 @@ dependencies = [ "anyhow", "apollo-router", "async-trait", - "http 0.2.12", + "http 1.2.0", "schemars", "serde", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5069,25 +5293,33 @@ dependencies = [ ] [[package]] -name = "prost-build" -version = "0.11.9" +name = "prost" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "119533552c9a7ffacc21e099c24a0ac8bb19c2a2a3f363de84cd9b844feab270" +checksum = "2c0fef6c4230e4ccf618a35c59d7ede15dea37de8427500f50aff708806e42ec" dependencies = [ "bytes", - "heck 0.4.1", - "itertools 0.10.5", - "lazy_static", + "prost-derive 0.13.4", +] + +[[package]] +name = "prost-build" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d0f3e5beed80eb580c68e2c600937ac2c4eedabdfd5ef1e5b7ea4f3fba84497b" +dependencies = [ + "heck 0.5.0", + "itertools 0.13.0", "log", - "multimap 0.8.3", + "multimap 0.10.0", + "once_cell", "petgraph", "prettyplease", - "prost 0.11.9", - "prost-types 0.11.9", + "prost 0.13.4", + "prost-types 0.13.4", "regex", - "syn 1.0.109", + "syn 2.0.90", "tempfile", - "which", ] [[package]] @@ -5117,12 +5349,16 @@ dependencies = [ ] [[package]] -name = "prost-types" -version = "0.11.9" +name = "prost-derive" +version = "0.13.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "213622a1460818959ac1181aaeb2dc9c7f63df720db7d788b3e24eacd1983e13" +checksum = "157c5a9d7ea5c2ed2d9fb8f495b64759f7816c7eaea54ba3978f0d63000162e3" dependencies = [ - "prost 0.11.9", + "anyhow", + "itertools 0.13.0", + "proc-macro2", + "quote", + "syn 2.0.90", ] [[package]] @@ -5134,6 +5370,15 @@ dependencies = 
[ "prost 0.12.6", ] +[[package]] +name = "prost-types" +version = "0.13.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc2f1e56baa61e93533aebc21af4d2134b70f66275e0fcdf3cbe43d77ff7e8fc" +dependencies = [ + "prost 0.13.4", +] + [[package]] name = "proteus" version = "0.5.0" @@ -5144,7 +5389,7 @@ dependencies = [ "regex", "serde", "serde_json", - "thiserror", + "thiserror 1.0.69", "typetag", ] @@ -5160,11 +5405,63 @@ version = "1.2.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0" +[[package]] +name = "quinn" +version = "0.11.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "62e96808277ec6f97351a2380e6c25114bc9e67037775464979f3037c92d05ef" +dependencies = [ + "bytes", + "pin-project-lite", + "quinn-proto", + "quinn-udp", + "rustc-hash 2.1.0", + "rustls 0.23.19", + "socket2", + "thiserror 2.0.10", + "tokio", + "tracing", +] + +[[package]] +name = "quinn-proto" +version = "0.11.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a2fe5ef3495d7d2e377ff17b1a8ce2ee2ec2a18cde8b6ad6619d65d0701c135d" +dependencies = [ + "bytes", + "getrandom 0.2.15", + "rand 0.8.5", + "ring", + "rustc-hash 2.1.0", + "rustls 0.23.19", + "rustls-pki-types", + "slab", + "thiserror 2.0.10", + "tinyvec", + "tracing", + "web-time", +] + +[[package]] +name = "quinn-udp" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "52cd4b1eff68bf27940dd39811292c49e007f4d0b4c357358dc9b0197be6b527" +dependencies = [ + "cfg_aliases", + "libc", + "once_cell", + "socket2", + "tracing", + "windows-sys 0.59.0", +] + [[package]] name = "quote" -version = "1.0.36" +version = "1.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0fa76aaf39101c457836aec0ce2316dbdc3ab723cdda1c6bd4e6ad4208acaca7" +checksum = 
"b5b9d34b8991d19d98081b46eacdd8eb58c6f2b201139f7c5f643cc155a633af" dependencies = [ "proc-macro2", ] @@ -5262,9 +5559,9 @@ dependencies = [ [[package]] name = "redis-protocol" -version = "4.1.0" +version = "5.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c31deddf734dc0a39d3112e73490e88b61a05e83e074d211f348404cee4d2c6" +checksum = "65deb7c9501fbb2b6f812a30d59c0253779480853545153a51d8e9e444ddc99f" dependencies = [ "bytes", "bytes-utils", @@ -5276,31 +5573,22 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" -dependencies = [ - "bitflags 1.3.2", -] - -[[package]] -name = "redox_syscall" -version = "0.5.3" +version = "0.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2a908a6e00f1fdd0dfd9c0eb08ce85126f6d8bbda50017e74bc4a4b7d4a926a4" +checksum = "9b6dfecf2c74bce2466cabf93f6664d6998a69eb21e39f4207930065b27b771f" dependencies = [ "bitflags 2.6.0", ] [[package]] name = "redox_users" -version = "0.4.5" +version = "0.4.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bd283d9651eeda4b2a83a43c1c91b266c40fd76ecd39a50a8c630ae69dc72891" +checksum = "ba009ff324d1fc1b900bd1fdb31564febe58a8ccc8a6fdbb93b543d33b13ca43" dependencies = [ "getrandom 0.2.15", "libredox", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -5311,7 +5599,7 @@ checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" dependencies = [ "aho-corasick", "memchr", - "regex-automata 0.4.8", + "regex-automata 0.4.9", "regex-syntax 0.8.5", ] @@ -5326,9 +5614,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.8" +version = "0.4.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368758f23274712b504848e9d5a6f010445cc8b87a7cdb4d7cbee666c1288da3" +checksum = 
"809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" dependencies = [ "aho-corasick", "memchr", @@ -5353,6 +5641,12 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +[[package]] +name = "relative-path" +version = "1.9.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ba39f3699c378cd8970968dcbff9c43159ea4cfbd88d43c00b22f2ef10a435d2" + [[package]] name = "reqwest" version = "0.11.27" @@ -5365,29 +5659,28 @@ dependencies = [ "encoding_rs", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper", - "hyper-rustls", + "hyper 0.14.31", + "hyper-rustls 0.24.2", "ipnet", "js-sys", "log", "mime", - "mime_guess", "once_cell", "percent-encoding", "pin-project-lite", - "rustls", - "rustls-native-certs", - "rustls-pemfile", + "rustls 0.21.12", + "rustls-native-certs 0.6.3", + "rustls-pemfile 1.0.4", "serde", "serde_json", "serde_urlencoded", - "sync_wrapper", + "sync_wrapper 0.1.2", "system-configuration", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-util", "tower-service", "url", @@ -5395,10 +5688,57 @@ dependencies = [ "wasm-bindgen-futures", "wasm-streams", "web-sys", - "webpki-roots", + "webpki-roots 0.25.4", "winreg", ] +[[package]] +name = "reqwest" +version = "0.12.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a77c62af46e79de0a562e1a9849205ffcb7fc1238876e9bd743357570e04046f" +dependencies = [ + "async-compression", + "base64 0.22.1", + "bytes", + "futures-core", + "futures-util", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.1", + "hyper-rustls 0.27.3", + "hyper-util", + "ipnet", + "js-sys", + "log", + "mime", + "mime_guess", + "once_cell", + "percent-encoding", + "pin-project-lite", + "quinn", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + 
"serde", + "serde_json", + "serde_urlencoded", + "sync_wrapper 1.0.2", + "tokio", + "tokio-rustls 0.26.1", + "tokio-util", + "tower-service", + "url", + "wasm-bindgen", + "wasm-bindgen-futures", + "wasm-streams", + "web-sys", + "webpki-roots 0.26.7", + "windows-registry", +] + [[package]] name = "resolv-conf" version = "0.7.0" @@ -5427,13 +5767,14 @@ dependencies = [ [[package]] name = "rhai" -version = "1.19.0" +version = "1.20.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61797318be89b1a268a018a92a7657096d83f3ecb31418b9e9c16dcbb043b702" +checksum = "8867cfc57aaf2320b60ec0f4d55603ac950ce852e6ab6b9109aa3d626a4dd7ea" dependencies = [ "ahash", "bitflags 2.6.0", "instant", + "no-std-compat", "num-traits", "once_cell", "rhai_codegen", @@ -5449,10 +5790,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5461,10 +5802,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5473,10 +5814,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5485,10 +5826,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5497,10 +5838,10 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", + "http 1.2.0", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -5524,7 +5865,7 @@ dependencies = [ "cfg-if", "getrandom 0.2.15", "libc", - "spin", + "spin 0.9.8", "untrusted", "windows-sys 0.52.0", ] @@ -5562,15 +5903,15 @@ dependencies = [ "apollo-smith", "async-trait", "env_logger", - "http 0.2.12", + "http 1.2.0", "libfuzzer-sys", "log", - "reqwest", + 
"reqwest 0.11.27", "schemars", "serde", "serde_json", "serde_json_bytes", - "tower", + "tower 0.5.2", ] [[package]] @@ -5581,7 +5922,7 @@ checksum = "417a3a9f582e349834051b8a10c8d71ca88da4211e4093528e36b9845f6b5f21" dependencies = [ "countme", "hashbrown 0.14.5", - "rustc-hash", + "rustc-hash 1.1.0", "text-size", ] @@ -5598,6 +5939,36 @@ dependencies = [ "log", ] +[[package]] +name = "rstest" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7b423f0e62bdd61734b67cd21ff50871dfaeb9cc74f869dcd6af974fbcb19936" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros", + "rustc_version", +] + +[[package]] +name = "rstest_macros" +version = "0.22.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c5e1711e7d14f74b12a58411c542185ef7fb7f2e7f8ee6e2940a883628522b42" +dependencies = [ + "cfg-if", + "glob", + "proc-macro-crate 3.2.0", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version", + "syn 2.0.90", + "unicode-ident", +] + [[package]] name = "rust-embed" version = "8.5.0" @@ -5646,39 +6017,31 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "08d43f7aa6b08d49f382cde6a7982047c3426db949b1424bc4b7ec9ae12c6ce2" [[package]] -name = "rustc_version" -version = "0.4.0" +name = "rustc-hash" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" -dependencies = [ - "semver", -] +checksum = "c7fb8039b3032c191086b10f11f319a6e99e1e82889c5cc6046f515c9db1d497" [[package]] -name = "rustix" -version = "0.37.27" +name = "rustc_version" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea8ca367a3a01fe35e6943c400addf443c0f57670e6ec51196f71a4b8762dd2" +checksum = "cfcb3a22ef46e85b45de6ee7e79d063319ebb6594faafcf1c225ea92ab6e9b92" dependencies = [ - "bitflags 1.3.2", - "errno", - "io-lifetimes", - 
"libc", - "linux-raw-sys 0.3.8", - "windows-sys 0.48.0", + "semver", ] [[package]] name = "rustix" -version = "0.38.34" +version = "0.38.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f" +checksum = "f93dc38ecbab2eb790ff964bb77fa94faf256fd3e73285fd7ba0903b76bedb85" dependencies = [ "bitflags 2.6.0", "errno", "libc", - "linux-raw-sys 0.4.14", - "windows-sys 0.52.0", + "linux-raw-sys", + "windows-sys 0.59.0", ] [[package]] @@ -5689,10 +6052,26 @@ checksum = "3f56a14d1f48b391359b22f731fd4bd7e43c97f3c50eee276f3aa09c94784d3e" dependencies = [ "log", "ring", - "rustls-webpki", + "rustls-webpki 0.101.7", "sct", ] +[[package]] +name = "rustls" +version = "0.23.19" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "934b404430bb06b3fae2cba809eb45a1ab1aecd64491213d7c3301b88393f8d1" +dependencies = [ + "aws-lc-rs", + "log", + "once_cell", + "ring", + "rustls-pki-types", + "rustls-webpki 0.102.8", + "subtle", + "zeroize", +] + [[package]] name = "rustls-native-certs" version = "0.6.3" @@ -5700,9 +6079,34 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" dependencies = [ "openssl-probe", - "rustls-pemfile", + "rustls-pemfile 1.0.4", "schannel", - "security-framework", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.7.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e5bfb394eeed242e909609f56089eecfe5fda225042e8b171791b9c95f5931e5" +dependencies = [ + "openssl-probe", + "rustls-pemfile 2.2.0", + "rustls-pki-types", + "schannel", + "security-framework 2.11.1", +] + +[[package]] +name = "rustls-native-certs" +version = "0.8.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7fcff2dd52b58a8d98a70243663a0d234c4e2b79235637849d15913394a247d3" +dependencies = [ + 
"openssl-probe", + "rustls-pki-types", + "schannel", + "security-framework 3.0.1", ] [[package]] @@ -5714,6 +6118,24 @@ dependencies = [ "base64 0.21.7", ] +[[package]] +name = "rustls-pemfile" +version = "2.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dce314e5fee3f39953d46bb63bb8a46d40c2f8fb7cc5a3b6cab2bde9721d6e50" +dependencies = [ + "rustls-pki-types", +] + +[[package]] +name = "rustls-pki-types" +version = "1.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "16f1201b3c9a7ee8039bcadc17b7e605e2945b27eee7631788c1bd2b0643674b" +dependencies = [ + "web-time", +] + [[package]] name = "rustls-webpki" version = "0.101.7" @@ -5724,11 +6146,23 @@ dependencies = [ "untrusted", ] +[[package]] +name = "rustls-webpki" +version = "0.102.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9" +dependencies = [ + "aws-lc-rs", + "ring", + "rustls-pki-types", + "untrusted", +] + [[package]] name = "rustversion" -version = "1.0.17" +version = "1.0.18" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955d28af4278de8121b7ebeb796b6a45735dc01436d898801014aced2773a3d6" +checksum = "0e819f2bc632f285be6d7cd36e25940d45b2391dd6d9b939e79de557f7014248" [[package]] name = "ryu" @@ -5747,20 +6181,20 @@ dependencies = [ [[package]] name = "scc" -version = "2.1.4" +version = "2.2.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4465c22496331e20eb047ff46e7366455bc01c0c02015c4a376de0b2cd3a1af" +checksum = "66b202022bb57c049555430e11fc22fea12909276a80a4c3d368da36ac1d88ed" dependencies = [ "sdd", ] [[package]] name = "schannel" -version = "0.1.23" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fbc91545643bcf3a0bbb6569265615222618bdf33ce4ffbbd13c4bbd4c093534" +checksum = 
"1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -5806,9 +6240,9 @@ dependencies = [ [[package]] name = "sdd" -version = "1.6.0" +version = "3.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8eb0dde0ccd15e337a3cf738a9a38115c6d8e74795d074e73973dad3d229a897" +checksum = "49c1eeaf4b6a87c7479688c6d52b9f1153cedd3c489300564f932b065c6eab95" [[package]] name = "sec1" @@ -5831,7 +6265,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ "bitflags 2.6.0", - "core-foundation", + "core-foundation 0.9.4", + "core-foundation-sys", + "libc", + "security-framework-sys", +] + +[[package]] +name = "security-framework" +version = "3.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e1415a607e92bec364ea2cf9264646dcce0f91e6d65281bd6f2819cca3bf39c8" +dependencies = [ + "bitflags 2.6.0", + "core-foundation 0.10.0", "core-foundation-sys", "libc", "security-framework-sys", @@ -5839,9 +6286,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.11.1" +version = "2.12.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "75da29fe9b9b08fe9d6b22b5b4bcbc75d8db3aa31e639aa56bb62e9d46bfceaf" +checksum = "fa39c7303dc58b5543c94d22c1766b0d31f2ee58306363ea622b10bbc075eaa2" dependencies = [ "core-foundation-sys", "libc", @@ -5882,7 +6329,7 @@ dependencies = [ "quote", "regex", "syn 2.0.90", - "thiserror", + "thiserror 1.0.69", ] [[package]] @@ -5898,12 +6345,13 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.120" +version = "1.0.133" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4e0d21c9a8cae1235ad58a00c11cb40d4b1e5c784f1ef2c537876ed6ffd8b7c5" +checksum = "c7fceb2473b9166b2294ef05efcb65a3db80803f0b03ef86a5fc88a2b85ee377" 
dependencies = [ - "indexmap 2.5.0", + "indexmap 2.7.0", "itoa", + "memchr", "ryu", "serde", ] @@ -5916,7 +6364,7 @@ checksum = "0ecd92a088fb2500b2f146c9ddc5da9950bb7264d3f00932cd2a6fb369c26c46" dependencies = [ "ahash", "bytes", - "indexmap 2.5.0", + "indexmap 2.7.0", "jsonpath-rust", "regex", "serde", @@ -5939,18 +6387,9 @@ version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c7715380eec75f029a4ef7de39a9200e0a63823176b759d055b613f5a87df6a6" dependencies = [ - "percent-encoding", - "serde", - "thiserror", -] - -[[package]] -name = "serde_spanned" -version = "0.6.8" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87607cb1398ed59d48732e575a4c28a7a8ebf2454b964fe3f224f2afc07909e1" -dependencies = [ + "percent-encoding", "serde", + "thiserror 1.0.69", ] [[package]] @@ -5979,9 +6418,9 @@ dependencies = [ [[package]] name = "serial_test" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b4b487fe2acf240a021cf57c6b2b4903b1e78ca0ecd862a71b71d2a51fed77d" +checksum = "1b258109f244e1d6891bf1053a55d63a5cd4f8f4c30cf9a1280989f80e7a1fa9" dependencies = [ "futures", "log", @@ -5993,9 +6432,9 @@ dependencies = [ [[package]] name = "serial_test_derive" -version = "3.1.1" +version = "3.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "82fe9db325bcef1fbcde82e078a5cc4efdf787e96b3b9cf45b50b529f2083d67" +checksum = "5d69265a08751de7844521fd15003ae0a888e035773ba05695c5c759a6f89eef" dependencies = [ "proc-macro2", "quote", @@ -6025,19 +6464,26 @@ dependencies = [ ] [[package]] -name = "sharded-slab" -version = "0.1.7" +name = "shape" +version = "0.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +checksum = "70179a0773695f4fc0b3e8e59f356064ed1532492e52bcf7e0dfef42934ec4c5" dependencies = [ + "apollo-compiler", + "indexmap 
2.7.0", "lazy_static", + "serde_json", + "serde_json_bytes", ] [[package]] -name = "shell-words" -version = "1.1.0" +name = "sharded-slab" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "24188a676b6ae68c3b2cb3a01be17fbf7240ce009799bb56d5b1409051e78fde" +checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6" +dependencies = [ + "lazy_static", +] [[package]] name = "shellexpand" @@ -6048,6 +6494,12 @@ dependencies = [ "dirs", ] +[[package]] +name = "shlex" +version = "1.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64" + [[package]] name = "signal-hook-registry" version = "1.4.2" @@ -6069,9 +6521,9 @@ dependencies = [ [[package]] name = "similar" -version = "2.5.0" +version = "2.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa42c91313f1d05da9b26f267f931cf178d4aba455b4c4622dd7355eb80c6640" +checksum = "1de1d4f81173b03af4c0cbed3c898f6bff5b870e4a7f5d6f4057d62a7a4b686e" [[package]] name = "simple_asn1" @@ -6081,7 +6533,7 @@ checksum = "adc4e5204eb1910f40f9cfa375f6f05b68c3abac4b6fd879c8ff5e7ae8a0a085" dependencies = [ "num-bigint", "num-traits", - "thiserror", + "thiserror 1.0.69", "time", ] @@ -6123,23 +6575,19 @@ dependencies = [ [[package]] name = "socket2" -version = "0.4.10" +version = "0.5.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9f7916fc008ca5542385b89a3d3ce689953c143e9304a9bf8beec1de48994c0d" +checksum = "c970269d99b64e60ec3bd6ad27270092a5394c4e309314b18ae3fe575695fbe8" dependencies = [ "libc", - "winapi", + "windows-sys 0.52.0", ] [[package]] -name = "socket2" -version = "0.5.7" +name = "spin" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce305eb0b4296696835b71df73eb912e0f1ffd2556a501fcede6e0c50349191c" -dependencies = [ - "libc", - "windows-sys 0.52.0", -] +checksum = 
"6e63cff320ae2c57904679ba7cb63280a3dc4613885beafb148ee7bf9aa9042d" [[package]] name = "spin" @@ -6240,7 +6688,7 @@ dependencies = [ "apollo-compiler", "apollo-router", "async-trait", - "tower", + "tower 0.5.2", "tracing", ] @@ -6272,6 +6720,15 @@ version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2047c6ded9c721764247e62cd3b03c09ffc529b2ba5b10ec482ae507a4a70160" +[[package]] +name = "sync_wrapper" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0bf256ce5efdfa370213c1dabab5935a12e49f2c58d15e9eac2870d3b4f27263" +dependencies = [ + "futures-core", +] + [[package]] name = "synstructure" version = "0.13.1" @@ -6295,9 +6752,9 @@ dependencies = [ [[package]] name = "sysinfo" -version = "0.32.0" +version = "0.32.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3b5ae3f4f7d64646c46c4cae4e3f01d1c5d255c7406fdd7c7f999a94e488791" +checksum = "4c33cd241af0f2e9e3b5c32163b873b29956890b5342e6745b917ce9d490f4af" dependencies = [ "core-foundation-sys", "libc", @@ -6314,7 +6771,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ba3a3adc5c275d719af8cb4272ea1c4a6d668a777f37e115f6d11ddbc1c8e0e7" dependencies = [ "bitflags 1.3.2", - "core-foundation", + "core-foundation 0.9.4", "system-configuration-sys", ] @@ -6330,14 +6787,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.10.1" +version = "3.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "85b77fafb263dd9d05cbeac119526425676db3784113aa9295c88498cbf8bff1" +checksum = "28cce251fcbc87fac86a866eeb0d6c2d536fc16d06f184bb61aeae11aa4cee0c" dependencies = [ "cfg-if", - "fastrand 2.1.0", - "rustix 0.38.34", - "windows-sys 0.52.0", + "fastrand 2.3.0", + "once_cell", + "rustix", + "windows-sys 0.59.0", ] [[package]] @@ -6349,16 +6807,6 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "terminal-prompt" -version = "0.2.3" -source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "572818b3472910acbd5dff46a3413715c18e934b071ab2ba464a7b2c2af16376" -dependencies = [ - "libc", - "winapi", -] - [[package]] name = "termtree" version = "0.4.1" @@ -6403,18 +6851,38 @@ dependencies = [ [[package]] name = "thiserror" -version = "1.0.63" +version = "1.0.69" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b6aaf5339b578ea85b50e080feb250a3e8ae8cfcdff9a461c9ec2904bc923f52" +dependencies = [ + "thiserror-impl 1.0.69", +] + +[[package]] +name = "thiserror" +version = "2.0.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a3ac7f54ca534db81081ef1c1e7f6ea8a3ef428d2fc069097c079443d24124d3" +dependencies = [ + "thiserror-impl 2.0.10", +] + +[[package]] +name = "thiserror-impl" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0342370b38b6a11b6cc11d6a805569958d54cfa061a29969c3b5ce2ea405724" +checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ - "thiserror-impl", + "proc-macro2", + "quote", + "syn 2.0.90", ] [[package]] name = "thiserror-impl" -version = "1.0.63" +version = "2.0.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a4558b58466b9ad7ca0f102865eccc95938dca1a74a856f2b57b6629050da261" +checksum = "9e9465d30713b56a37ede7185763c3492a91be2f5fa68d958c44e41ab9248beb" dependencies = [ "proc-macro2", "quote", @@ -6465,11 +6933,12 @@ version = "0.1.0" dependencies = [ "anyhow", "apollo-router", - "http 0.2.12", - "hyper", + "http 1.2.0", + "http-body-util", + "hyper 0.14.31", "serde_json", "tokio", - "tower", + "tower 0.5.2", ] [[package]] @@ -6571,21 +7040,20 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.38.1" +version = "1.42.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"eb2caba9f80616f438e09748d5acda951967e1ea58508ef53d9c6402485a46df" +checksum = "5cec9b21b0450273377fc97bd4c33a8acffc8c996c987a7c5b319a0083707551" dependencies = [ "backtrace", "bytes", "libc", - "mio", - "num_cpus", + "mio 1.0.3", "parking_lot", "pin-project-lite", "signal-hook-registry", - "socket2 0.5.7", + "socket2", "tokio-macros", - "windows-sys 0.48.0", + "windows-sys 0.52.0", ] [[package]] @@ -6600,9 +7068,9 @@ dependencies = [ [[package]] name = "tokio-macros" -version = "2.3.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f5ae998a069d4b5aba8ee9dad856af7d520c3699e6159b185c2acd48155d39a" +checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752" dependencies = [ "proc-macro2", "quote", @@ -6615,15 +7083,25 @@ version = "0.24.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" dependencies = [ - "rustls", + "rustls 0.21.12", + "tokio", +] + +[[package]] +name = "tokio-rustls" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5f6d0975eaace0cf0fcadee4e4aaa5da15b5c079146f2cffb67c113be122bf37" +dependencies = [ + "rustls 0.23.19", "tokio", ] [[package]] name = "tokio-stream" -version = "0.1.15" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "267ac89e0bec6e691e5813911606935d77c476ff49024f98abcea3e7b15e37af" +checksum = "eca58d7bba4a75707817a2c44174253f9236b2d5fbd055602e9d5c07c139a047" dependencies = [ "futures-core", "pin-project-lite", @@ -6652,18 +7130,31 @@ checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", - "rustls", - "rustls-native-certs", "tokio", - "tokio-rustls", - "tungstenite", + "tungstenite 0.20.1", +] + +[[package]] +name = "tokio-tungstenite" +version = "0.26.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "be4bf6fecd69fcdede0ec680aaf474cdab988f9de6bc73d3758f0160e3b7025a" +dependencies = [ + "futures-util", + "log", + "rustls 0.23.19", + "rustls-native-certs 0.8.1", + "rustls-pki-types", + "tokio", + "tokio-rustls 0.26.1", + "tungstenite 0.26.1", ] [[package]] name = "tokio-util" -version = "0.7.11" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cf6b47b3771c49ac75ad09a6162f53ad4b8088b76ac60e8ec1455b31a189fe1" +checksum = "d7fcaa8d55a2bdd6b83ace262b016eca0d79ee02818c5c1bcdf0305114081078" dependencies = [ "bytes", "futures-core", @@ -6674,26 +7165,11 @@ dependencies = [ "tokio", ] -[[package]] -name = "toml" -version = "0.8.19" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1ed1f98e3fdc28d6d910e6737ae6ab1a93bf1985935a1193e68f93eeb68d24e" -dependencies = [ - "serde", - "serde_spanned", - "toml_datetime", - "toml_edit 0.22.22", -] - [[package]] name = "toml_datetime" version = "0.6.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0dd7358ecb8fc2f8d014bf86f6f638ce72ba252a2c3a2572f2a795f1d23efb41" -dependencies = [ - "serde", -] [[package]] name = "toml_edit" @@ -6701,7 +7177,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.5.0", + "indexmap 2.7.0", "toml_datetime", "winnow 0.5.40", ] @@ -6712,9 +7188,7 @@ version = "0.22.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4ae48d6208a266e853d946088ed816055e556cc6028c5e8e2b84d9fa5dd7c7f5" dependencies = [ - "indexmap 2.5.0", - "serde", - "serde_spanned", + "indexmap 2.7.0", "toml_datetime", "winnow 0.6.20", ] @@ -6727,26 +7201,26 @@ checksum = "3082666a3a6433f7f511c7192923fa1fe07c69332d3c6a2e6bb040b569199d5a" dependencies = [ "async-stream", "async-trait", - "axum", + 
"axum 0.6.20", "base64 0.21.7", "bytes", "flate2", "futures-core", "futures-util", - "h2", + "h2 0.3.26", "http 0.2.12", "http-body 0.4.6", - "hyper", - "hyper-timeout", + "hyper 0.14.31", + "hyper-timeout 0.4.1", "percent-encoding", "pin-project", "prost 0.11.9", - "rustls-native-certs", - "rustls-pemfile", + "rustls-native-certs 0.6.3", + "rustls-pemfile 1.0.4", "tokio", - "tokio-rustls", + "tokio-rustls 0.24.1", "tokio-stream", - "tower", + "tower 0.4.13", "tower-layer", "tower-service", "tracing", @@ -6773,17 +7247,52 @@ dependencies = [ "tracing", ] +[[package]] +name = "tonic" +version = "0.12.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "877c5b330756d856ffcc4553ab34a5684481ade925ecc54bcd1bf02b1d0d4d52" +dependencies = [ + "async-stream", + "async-trait", + "axum 0.7.9", + "base64 0.22.1", + "bytes", + "flate2", + "h2 0.4.7", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", + "hyper 1.5.1", + "hyper-timeout 0.5.2", + "hyper-util", + "percent-encoding", + "pin-project", + "prost 0.13.4", + "rustls-native-certs 0.8.1", + "rustls-pemfile 2.2.0", + "socket2", + "tokio", + "tokio-rustls 0.26.1", + "tokio-stream", + "tower 0.4.13", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tonic-build" -version = "0.9.2" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a6fdaae4c2c638bb70fe42803a26fbd6fc6ac8c72f5c59f67ecc2a2dcabf4b07" +checksum = "9557ce109ea773b399c9b9e5dca39294110b74f1f342cb347a80d1fce8c26a11" dependencies = [ "prettyplease", "proc-macro2", "prost-build", + "prost-types 0.13.4", "quote", - "syn 1.0.109", + "syn 2.0.90", ] [[package]] @@ -6794,7 +7303,6 @@ checksum = "b8fa9be0de6cf49e536ce1851f987bd21a43b771b09473c3549a6c853db37c1c" dependencies = [ "futures-core", "futures-util", - "hdrhistogram", "indexmap 1.9.3", "pin-project", "pin-project-lite", @@ -6807,39 +7315,68 @@ dependencies = [ "tracing", ] +[[package]] +name = "tower" +version = 
"0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" +dependencies = [ + "futures-core", + "futures-util", + "hdrhistogram", + "indexmap 2.7.0", + "pin-project-lite", + "slab", + "sync_wrapper 1.0.2", + "tokio", + "tokio-util", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower-http" -version = "0.4.4" +version = "0.6.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61c5bb1d698276a2443e5ecfabc1008bf15a36c12e6a7176e7bf089ea9131140" +checksum = "403fa3b783d4b626a8ad51d766ab03cb6d2dbfc46b1c5d4448395e6628dc9697" dependencies = [ "async-compression", + "base64 0.22.1", "bitflags 2.6.0", "bytes", "futures-core", "futures-util", - "http 0.2.12", - "http-body 0.4.6", + "http 1.2.0", + "http-body 1.0.1", + "http-body-util", "http-range-header", + "httpdate", + "iri-string", + "mime", + "mime_guess", + "percent-encoding", "pin-project-lite", "tokio", "tokio-util", + "tower 0.5.2", "tower-layer", "tower-service", "tracing", + "uuid", ] [[package]] name = "tower-layer" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c20c8dbed6283a09604c3e69b4b7eeb54e298b8a600d4d5ecb5ad39de609f1d0" +checksum = "121c2a6cda46980bb0fcd1647ffaf6cd3fc79a013de288782836f6df9c48780e" [[package]] name = "tower-service" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b6bc1c9ce2b5135ac7f93c72918fc37feb872bdc6a5533a8b85eb4b86bfdae52" +checksum = "8df9b6e13f2d32c91b9bd719c00d1958837bc7dec474d94952798cc8e69eeec3" [[package]] name = "tower-test" @@ -6857,9 +7394,9 @@ dependencies = [ [[package]] name = "tracing" -version = "0.1.40" +version = "0.1.41" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" +checksum = 
"784e0ac535deb450455cbfa28a6f0df145ea1bb7ae51b821cf5e7927fdcfbdd0" dependencies = [ "log", "pin-project-lite", @@ -6869,9 +7406,9 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.27" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" +checksum = "395ae124c09f9e6918a2310af6038fba074bcf474ac352496d5910dd59a2226d" dependencies = [ "proc-macro2", "quote", @@ -6880,9 +7417,9 @@ dependencies = [ [[package]] name = "tracing-core" -version = "0.1.32" +version = "0.1.33" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" +checksum = "e672c95779cf947c5311f83787af4fa8fffd12fb27e4993211a84bdfd9610f9c" dependencies = [ "once_cell", "valuable", @@ -6948,11 +7485,21 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-serde" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "704b1aeb7be0d0a84fc9828cae51dab5970fee5088f83d1dd7ee6f6246fc6ff1" +dependencies = [ + "serde", + "tracing-core", +] + [[package]] name = "tracing-subscriber" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b" +checksum = "e8189decb5ac0fa7bc8b96b7cb9b2701d60d48805aca84a238004d665fcc4008" dependencies = [ "matchers", "nu-ansi-term 0.46.0", @@ -6966,7 +7513,7 @@ dependencies = [ "tracing", "tracing-core", "tracing-log 0.2.0", - "tracing-serde", + "tracing-serde 0.2.0", ] [[package]] @@ -6992,9 +7539,9 @@ dependencies = [ [[package]] name = "triomphe" -version = "0.1.13" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e6631e42e10b40c0690bf92f404ebcfe6e1fdb480391d15f17cc8e96eeed5369" +checksum = 
"ef8f7726da4807b58ea5c96fdc122f80702030edc33b35aff9190a51148ccc85" dependencies = [ "serde", "stable_deref_trait", @@ -7008,18 +7555,18 @@ checksum = "e421abadd41a4225275504ea4d6566923418b7f05506fbc9c0fe86ba7396114b" [[package]] name = "try_match" -version = "0.4.1" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "61ae3c1941e8859e30d28e572683fbfa89ae5330748b45139aedf488389e2be4" +checksum = "b065c869a3f832418e279aa4c1d7088f9d5d323bde15a60a08e20c2cd4549082" dependencies = [ "try_match_inner", ] [[package]] name = "try_match_inner" -version = "0.5.1" +version = "0.5.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0a91713132798caecb23c977488945566875e7b61b902fb111979871cbff34e" +checksum = "b9c81686f7ab4065ccac3df7a910c4249f8c0f3fb70421d6ddec19b9311f63f9" dependencies = [ "proc-macro2", "quote", @@ -7039,13 +7586,32 @@ dependencies = [ "httparse", "log", "rand 0.8.5", - "rustls", "sha1", - "thiserror", + "thiserror 1.0.69", "url", "utf-8", ] +[[package]] +name = "tungstenite" +version = "0.26.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "413083a99c579593656008130e29255e54dcaae495be556cc26888f211648c24" +dependencies = [ + "byteorder", + "bytes", + "data-encoding", + "http 1.2.0", + "httparse", + "log", + "rand 0.8.5", + "rustls 0.23.19", + "rustls-pki-types", + "sha1", + "thiserror 2.0.10", + "utf-8", +] + [[package]] name = "typed-arena" version = "2.0.2" @@ -7095,9 +7661,9 @@ dependencies = [ [[package]] name = "ucd-trie" -version = "0.1.6" +version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9" +checksum = "2896d95c02a80c6d6a5d6e953d479f5ddf2dfdb6a244441010e373ac0fb88971" [[package]] name = "uname" @@ -7110,48 +7676,36 @@ dependencies = [ [[package]] name = "unicase" -version = "2.7.0" +version = "2.8.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7d2d4dafb69621809a81864c9c1b864479e1235c0dd4e199924b9742439ed89" -dependencies = [ - "version_check", -] +checksum = "7e51b68083f157f853b6379db119d1c1be0e6e4dec98101079dec41f6f5cf6df" [[package]] name = "unicode-bidi" -version = "0.3.15" +version = "0.3.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08f95100a766bf4f8f28f90d77e0a5461bbdb219042e7679bebe79004fed8d75" +checksum = "5ab17db44d7388991a428b2ee655ce0c212e862eff1768a455c58f9aad6e7893" [[package]] name = "unicode-ident" -version = "1.0.12" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3354b9ac3fae1ff6755cb6db53683adb661634f67557942dea4facebec0fee4b" +checksum = "adb9e6ca4f869e1180728b7950e35922a7fc6397f7b641499e8f3ef06e50dc83" [[package]] name = "unicode-normalization" -version = "0.1.23" +version = "0.1.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a56d1686db2308d901306f92a263857ef59ea39678a5458e7cb17f01415101f5" +checksum = "5033c97c4262335cded6d6fc3e5c18ab755e1a3dc96376350f3d8e9f009ad956" dependencies = [ "tinyvec", ] [[package]] name = "unicode-width" -version = "0.1.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0336d538f7abc86d282a4189614dfaa90810dfc2c6f6427eaf88e16311dd225d" - -[[package]] -name = "unreachable" -version = "1.0.0" +version = "0.1.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56" -dependencies = [ - "void", -] +checksum = "7dd6e30e90baa6f72411720665d41d89b9a3d039dc45b8faea1ddd07f617f6af" [[package]] name = "untrusted" @@ -7220,27 +7774,15 @@ checksum = "830b7e5d4d90034032940e4ace0d9a9a057e7a45cd94e6c007832e39edb82f6d" [[package]] name = "value-bag" -version = "1.9.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"5a84c137d37ab0142f0f2ddfe332651fdbf252e7b7dbb4e67b6c1f1b2e925101" - -[[package]] -name = "vcpkg" -version = "0.2.15" +version = "1.10.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "accd4ea62f7bb7a82fe23066fb0957d48ef677f6eeb8215f372f52e48bb32426" +checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2" [[package]] name = "version_check" -version = "0.9.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f" - -[[package]] -name = "void" -version = "1.0.2" +version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a02e4885ed3bc0f2de90ea6dd45ebcbb66dacffe03547fadbb0eeae2770887d" +checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" [[package]] name = "vsimd" @@ -7287,23 +7829,23 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4be2531df63900aeb2bca0daaaddec08491ee64ceecbee5076636a3b026795a8" +checksum = "a474f6281d1d70c17ae7aa6a613c87fce69a127e2624002df63dcb39d6cf6396" dependencies = [ "cfg-if", + "once_cell", "wasm-bindgen-macro", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "614d787b966d3989fa7bb98a654e369c762374fd3213d212cfc0251257e747da" +checksum = "5f89bb38646b4f81674e8f5c3fb81b562be1fd936d84320f3264486418519c79" dependencies = [ "bumpalo", "log", - "once_cell", "proc-macro2", "quote", "syn 2.0.90", @@ -7312,21 +7854,22 @@ dependencies = [ [[package]] name = "wasm-bindgen-futures" -version = "0.4.42" +version = "0.4.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"76bc14366121efc8dbb487ab05bcc9d346b3b5ec0eaa76e46594cabbe51762c0" +checksum = "38176d9b44ea84e9184eff0bc34cc167ed044f816accfe5922e54d84cf48eca2" dependencies = [ "cfg-if", "js-sys", + "once_cell", "wasm-bindgen", "web-sys", ] [[package]] name = "wasm-bindgen-macro" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1f8823de937b71b9460c0c34e25f3da88250760bec0ebac694b49997550d726" +checksum = "2cc6181fd9a7492eef6fef1f33961e3695e4579b9872a6f7c83aee556666d4fe" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -7334,9 +7877,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e94f17b526d0a461a191c78ea52bbce64071ed5c04c9ffe424dcb38f74171bb7" +checksum = "30d7a95b763d3c45903ed6c81f156801839e5ee968bb07e534c44df0fcd330c2" dependencies = [ "proc-macro2", "quote", @@ -7347,15 +7890,15 @@ dependencies = [ [[package]] name = "wasm-bindgen-shared" -version = "0.2.92" +version = "0.2.99" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af190c94f2773fdb3729c55b007a722abb5384da03bc0986df4c289bf5567e96" +checksum = "943aab3fdaaa029a6e0271b35ea10b72b943135afe9bffca82384098ad0e06a6" [[package]] name = "wasm-streams" -version = "0.4.0" +version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b65dc4c90b63b118468cf747d8bf3566c1913ef60be765b5730ead9e0a3ba129" +checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65" dependencies = [ "futures-util", "js-sys", @@ -7366,9 +7909,19 @@ dependencies = [ [[package]] name = "web-sys" -version = "0.3.69" +version = "0.3.76" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "04dd7223427d52553d3702c004d3b2fe07c148165faa56313cb00211e31c12bc" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" 
+version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "77afa9a11836342370f4817622a2f0f418b134426d91a82dfb48f532d2ec13ef" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", @@ -7380,6 +7933,15 @@ version = "0.25.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5f20c57d8d7db6d3b86154206ae5d8fba62dd39573114de97c2cb0578251f8e1" +[[package]] +name = "webpki-roots" +version = "0.26.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d642ff16b7e79272ae451b7322067cdc17cadf68c23264be9d94a32319efe7e" +dependencies = [ + "rustls-pki-types", +] + [[package]] name = "which" version = "4.4.2" @@ -7389,7 +7951,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.34", + "rustix", ] [[package]] @@ -7416,11 +7978,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.8" +version = "0.1.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d4cc384e1e73b93bafa6fb4f1df8c41695c8a91cf9c4c64358067d15a7b6c6b" +checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" dependencies = [ - "windows-sys 0.52.0", + "windows-sys 0.59.0", ] [[package]] @@ -7527,6 +8089,17 @@ dependencies = [ "syn 2.0.90", ] +[[package]] +name = "windows-registry" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e400001bb720a623c1c69032f8e3e4cf09984deec740f007dd2b03ec864804b0" +dependencies = [ + "windows-result 0.2.0", + "windows-strings", + "windows-targets 0.52.6", +] + [[package]] name = "windows-result" version = "0.1.2" @@ -7582,6 +8155,15 @@ dependencies = [ "windows-targets 0.52.6", ] +[[package]] +name = "windows-sys" +version = "0.59.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"1e38bc4d79ed67fd075bcc251a1c39b32a1776bbe92e5bef1f0bf1f8c531853b" +dependencies = [ + "windows-targets 0.52.6", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -7801,7 +8383,7 @@ dependencies = [ "futures", "futures-timer", "http-types", - "hyper", + "hyper 0.14.31", "log", "once_cell", "regex", @@ -7812,15 +8394,15 @@ dependencies = [ [[package]] name = "wmi" -version = "0.14.1" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70df482bbec7017ce4132154233642de658000b24b805345572036782a66ad55" +checksum = "dc47c0776cc6c00d2f7a874a0c846d94d45535936e5a1187693a24f23b4dd701" dependencies = [ "chrono", "futures", "log", "serde", - "thiserror", + "thiserror 2.0.10", "windows 0.58.0", "windows-core 0.58.0", ] @@ -7894,6 +8476,7 @@ version = "0.7.35" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b9b4fd18abc82b8136838da5d50bae7bdea537c574d8dc1a34ed098d6c166f0" dependencies = [ + "byteorder", "zerocopy-derive", ] @@ -7968,18 +8551,18 @@ dependencies = [ [[package]] name = "zstd-safe" -version = "7.2.0" +version = "7.2.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fa556e971e7b568dc775c136fc9de8c779b1c2fc3a63defaafadffdbd3181afa" +checksum = "54a3ab4db68cea366acc5c897c7b4d4d1b8994a9cd6e6f841f8964566a419059" dependencies = [ "zstd-sys", ] [[package]] name = "zstd-sys" -version = "2.0.12+zstd.1.5.6" +version = "2.0.13+zstd.1.5.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0a4e40c320c3cb459d9a9ff6de98cff88f4751ee9275d140e2be94a2b74e4c13" +checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 25fd043d7c..6910478052 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -4,8 +4,6 @@ default-members = ["apollo-router", "apollo-federation"] members = [ "apollo-router", "apollo-router-benchmarks", - "apollo-router-scaffold", - 
"apollo-router-scaffold/scaffold-test", "apollo-federation", "apollo-federation/cli", "examples/add-timestamp-header/rhai", @@ -54,8 +52,8 @@ apollo-parser = "0.8.4" apollo-smith = "0.15.0" async-trait = "0.1.77" hex = { version = "0.4.3", features = ["serde"] } -http = "0.2.11" -insta = { version = "1.38.0", features = ["json", "redactions", "yaml"] } +http = "1.1.0" +insta = { version = "1.38.0", features = ["json", "redactions", "yaml", "glob"] } once_cell = "1.19.0" reqwest = { version = "0.11.0", default-features = false, features = [ "rustls-tls", @@ -66,7 +64,7 @@ reqwest = { version = "0.11.0", default-features = false, features = [ ] } schemars = { version = "0.8.16", features = ["url"] } -serde = { version = "1.0.197", features = ["derive", "rc"] } +serde = { version = "1.0.198", features = ["derive", "rc"] } serde_json = { version = "1.0.114", features = [ "preserve_order", "float_roundtrip", @@ -75,7 +73,4 @@ serde_json_bytes = { version = "0.2.4", features = ["preserve_order"] } sha1 = "0.10.6" tempfile = "3.10.1" tokio = { version = "1.36.0", features = ["full"] } -tower = { version = "0.4.13", features = ["full"] } - -[patch.crates-io] -"hyper" = { git = "https://github.com/apollographql/hyper.git", tag = "header-customizations-20241108" } +tower = { version = "0.5.1", features = ["full"] } diff --git a/HYPER_1.0_REVIEW_NOTES.md b/HYPER_1.0_REVIEW_NOTES.md new file mode 100644 index 0000000000..19d296f07f --- /dev/null +++ b/HYPER_1.0_REVIEW_NOTES.md @@ -0,0 +1,135 @@ +# Hyper 1.0 Review Notes + +## Generally Useful Information + +Read HYPER_1.0_UPDATE.md first. This provides a lot of generally +useful information. + +### Crate updates +Many crates have been updated as part of the update. In some parts of +codebase we had to continue using the older version of the crate so +that opentelemetry (which has not been updated to by hyper 1.0 +compliant) would continue to work. 
+ +tonic-0_9 = { version = "0.9.0", features = [ +reqwest-0_11 = { version = "0.11.27", default-features = false, features = [ +http-0_2 = { version = "0.2.12", package = "http" } + +When opentelemetry is updated to use hyper 1.0 we will remove these changes. + +### Body Manipulation + +The change in Hyper to have many different types of bodies implementing the +Body trait means it was useful to have a set of useful body manipulation +functions which are collected in apollo-router/src/services/router/body.rs. + +Being familiar with these in the review will be helpful as they are used in +many locations. + +### hyper_header_limits feature + +We removed this since it's not required in hyper 1.0 + +### XXX Comments + +Anywhere you see a XXX comment is an indication that this should be reviewed +carefully. + +### default crypto + +At various places in the code base you'll see code like this: +``` + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); +``` + +This is because crypto initialisation is now done differently in rustls and +two crypto stacks are supported: `aws` and `ring`. + +If only one stack is enabled, then there is no need to specify a default +provider. Unfortunately, because some of our crates are quite old, both +aws and ring are specified. In this case no default is favoured by rustls +and the crate panics at runtime when crypto functionality is required. + +The way around this is to specify a default manually. This has to be done +once for the main binary and at various places in tests. + +Hopefully this situation will improve in the future. + +## Focussed Review + +Please pay particular attention to these files, since they proved tricky +to update: + +### apollo-router/src/axum_factory/axum_http_server_factory.rs + +Some of the configuration is centralised on `next` as http_config and passed +to serve_router_on_listen_addr. 
We can't do that following the hyper update +because there are now different builders for Http1 or Http2 configuration. + +The HandleErrorLayer has been removed at line 443 and the comment there +explains the change. Anyone with more specific knowledge about how +decompression works should review this carefully. + +metrics_handler/license_handler no longer need to be generic. + +Changes in Axum routing mean that handle_graphql is cleaner to write as a +generic function. + +### apollo-router/src/axum_factory/listeners.rs + +Some of the most complex changes with respect to TCP stream handling were +encountered here. Note that `TokioIo` and `hyper::service::service_fn` were +used to "wrap" Axum application and service handler to integrate everything +together. Please familiarise yourselves with how these work so that you +can review the changes in this file. + +There is an unresolved problem in the port with graceful shutdown which we +still need to figure out. I believe it is the cause of one of our jaeger +tests which are failing. + +The primary additional changes here are relating to how hyper services +are configured, built and served. + +### apollo-router/src/axum_factory/tests.rs + +`UnixStream` was provided as a helpful wrapper around `tokio::net::UnixStream` +to simplify integration with existing tests. + +### apollo-router/src/plugins/connectors/make_requests.rs + +In order to be able to compare snapshots we have to `map` our requests +into a tuple where the request has a converted body. + +We can't preserve the existing request because the type of the body (RouterBody) +wouldn't match. This means we can still snapshot across body contents. + +### apollo-router/src/plugins/coprocessor/mod.rs + +We replace the RouterBodyConverter type with a MapResponse. 
+ +### apollo-router/src/plugins/limits/limited.rs + +We remove `poll_trailers` since the router doesn't do anything meaningful with +trailers (and neither did this implementation) + +The `poll_data` is replaced with `poll_frame` to utilise our new stream +conversion functionality. + +### apollo-router/src/plugins/telemetry/config_new/connector/selectors.rs + +In tests we replaced bodies of "" with empty bodies. That seems fine, but +more informed opinions are sought here. We've done that in a few other files +as well and the tests are also all passing. + +### apollo-router/src/plugins/traffic_shaping/retry.rs + +I'm not sure why all of the tests from line 91 were deleted. Anyone have any +ideas? + +### apollo-router/src/services/http/tests.rs + +These tests were particularly tricky to convert, so please examine them +carefully for any issues. Especially with regard to TLS. + + diff --git a/HYPER_1.0_UPDATE.md b/HYPER_1.0_UPDATE.md new file mode 100644 index 0000000000..a39d56ea34 --- /dev/null +++ b/HYPER_1.0_UPDATE.md @@ -0,0 +1,129 @@ +# Hyper 1.0 upgrade decisions + +Document useful information for posterity. + +## Additional Crates + +The hyper ecosystem has split functionality into multiple crates. Some +functionality has migrated to new crates (-util). + +axum = { version = "0.6.20", features = ["headers", "json", "original-uri"] } -> { version = "0.7.9", features = ["json", "original-uri"] } +axum-extra = NEW -> { version = "0.9.6", features = [ "typed-header" ] } +Note: Not sure if I need to enable typed-header, check this later + +http = "0.2.11" -> "1.1.0" +http-body = "0.4.6" -> "1.0.1" +http-body-util = NEW "0.1.2" + +hyper = { version = "0.14.28", features = ["server", "client", "stream"] } -> hyper = { version = "1.5.1", features = ["full"] } +hyper-util = NEW { version = "0.1.10", features = ["full"] } + +## Type Changes + +A lot of types are changing. It's not always a 1:1 change, because the new +versions of hyper/axum offer much more nuance. 
I've tried to apply the +following changes consistently. + +### hyper::Body + +This is no longer a struct, but a trait with multiple implementations depending +on the use case. I've applied the following principles. + +#### Clearly driven from Axum + +In this case, I'm just using the `axum::body::Body` type as a direct +replacement for `hyper::Body`. I'm assuming that the axum folks know what +they are doing. + +#### Otherwise + +My default choice is `http_body_util::combinators::UnsyncBoxBody` + +This is chosen because it is a trait object which represents any of the many +structs which implement `hyper::body`. Unsync because the future Streams +which we use in the router are only Send, not Sync. + +From an `UnsyncBoxBody` we can easily convert to and from various useful +stream representations. + +### hyper::Error + +In 0.14 this struct was a good choice, however we found it difficult to work +with as we started to connect futures streams back to axum responses. + +We have replaced our use of `hyper::Error` with `axum::Error`. + +### hyper::server::conn::Http -> hyper_util::server::conn::auto::Builder; + +This is a straightforward drop-in replacement because the Http server +has been moved out of the `hyper` crate into `hyper_util` and renamed. + +### hyper::body::HttpBody -> http_body::Body as HttpBody; + +`HttpBody` no longer exists in `hyper`. This could be replaced either by +`http_body::Body` or `axum::body::HttpBody`. The latter is a re-export of the +former. + +I've gone with the former for now, since it is clearly a dependency on +`http_body` rather than a dependency on `axum`. + +### hyper::client::connect::dns::Name -> hyper_util::client::legacy::connect::dns::Name; +This is a straightforward drop-in replacement because the Name struct +has been moved out of the `hyper` crate into `hyper_util`. 
+ +### hyper::client::HttpConnector -> use hyper_util::client::legacy::connect::HttpConnector; +This is a straightforward drop-in replacement because the HttpConnector struct +has been moved out of the `hyper` crate into `hyper_util`. + +### http_body::Full -> axum::body::Full + +This is no longer required in hyper 1.0 conversion. + +### use axum::headers::HeaderName -> use axum_extra::headers::HeaderName + +This is a straightforward drop-in replacement because the ::headers module +has been moved out of the `axum` crate into `axum-extra`. + +Note: Not sure if axum-extra TypedHeader feature needs to be enabled for +this to continue working. Enabled for now. + +### use axum::body::boxed; + +This function appears to be completely removed and no longer required. +Just delete it from the code base. + +### use axum::body::StreamBody -> use http_body_util::StreamBody; + +This type has been moved to the http_body_util crate. + +### hyper::body::to_bytes(body) -> axum::body::to_bytes(body) + +Drop in replacement as functionality migrated from hyper to axum +Note: There may be a better way to do this in hyper 1.0, leave as this +for now. + +### hyper::Body::from(encoded) -> http_body_util::BodyStream::from(encoded) + +`Body` is now a trait, so I *think* this needs to be converted to become a +`BodyStream`. It may be that it should be a `Full`, check later. + +### hyper::Body::empty() -> http_body_util::Empty::new() + +`Body` is now a trait. `Empty` is an implementation of the trait which is +empty. + +### hyper::Client -> hyper_util::client::legacy::Client + +The `Client` has been moved to the `hyper_util` crate. + +### axum::Next is no longer generic + +Simply remove the generic argument + +### transport::Response -> crate::router::Response + +The transport module is no longer required, so we can remove it + +### tower::retry::budget::Budget -> use tower::retry::budget::TpsBudget; + +Ported to new tower Retry logic. 
diff --git a/README.md b/README.md index df19eb7593..0a560493c4 100644 --- a/README.md +++ b/README.md @@ -47,8 +47,6 @@ Options: Schema location relative to the project directory [env: APOLLO_ROUTER_SUPERGRAPH_PATH=] --apollo-uplink-endpoints The endpoints (comma separated) polled to fetch the latest supergraph schema [env: APOLLO_UPLINK_ENDPOINTS=] - --apollo-uplink-poll-interval - The time between polls to Apollo uplink. Minimum 10s [env: APOLLO_UPLINK_POLL_INTERVAL=] [default: 10s] --anonymous-telemetry-disabled Disable sending anonymous usage information to Apollo [env: APOLLO_TELEMETRY_DISABLED=] --apollo-uplink-timeout diff --git a/RELEASE_CHECKLIST.md b/RELEASE_CHECKLIST.md index 4ba8c4326f..68cdc92185 100644 --- a/RELEASE_CHECKLIST.md +++ b/RELEASE_CHECKLIST.md @@ -49,13 +49,11 @@ The examples below will use [the GitHub CLI (`gh`)](https://cli.github.com/) to Make sure you have the following software installed and available in your `PATH`. - - `gh`: [The GitHub CLI](https://cli.github.com/) - - `cargo`: [Cargo & Rust Installation](https://doc.rust-lang.org/cargo/getting-started/installation.html) - - `helm`: see - - `helm-docs`: see - - `cargo-about`: install with `cargo install --locked cargo-about` - - `cargo-deny`: install with `cargo install --locked cargo-deny` - - `set-version` from `cargo-edit`: `cargo install --locked cargo-edit` +- `gh`: [The GitHub CLI](https://cli.github.com/) +- `cargo`: [Cargo & Rust Installation](https://doc.rust-lang.org/cargo/getting-started/installation.html) +- `helm`: see +- `helm-docs`: see +- `cargo-about`, `cargo-deny`, & `cargo-edit`: install the same versions as CI (`.circleci/config.yml#install_extra_tools`) #### Pick a version @@ -200,7 +198,7 @@ Start following the steps below to start a release PR. The process is **not ful 10. 
Finally, publish the Crates from your local computer (this also needs to be moved to CI, but requires changing the release containers to be Rust-enabled and to restore the caches): - > Note: This command may appear unnecessarily specific, but it will help avoid publishing a version to Crates.io that doesn't match what you're currently releasing. (e.g., in the event that you've changed branches in another window) + > Note: This command may appear unnecessarily specific, but it will help avoid publishing a version to Crates.io that doesn't match what you're currently releasing. (e.g., in the event that you've changed branches in another window) ``` cargo publish -p apollo-federation@"${APOLLO_ROUTER_RELEASE_VERSION}${APOLLO_ROUTER_PRERELEASE_SUFFIX}" && @@ -442,7 +440,7 @@ Start following the steps below to start a release PR. The process is **not ful 18. Finally, publish the Crates (`apollo-federation` followed by `apollo-router`) from your local computer from the `main` branch (this also needs to be moved to CI, but requires changing the release containers to be Rust-enabled and to restore the caches): - > Note: This command may appear unnecessarily specific, but it will help avoid publishing a version to Crates.io that doesn't match what you're currently releasing. (e.g., in the event that you've changed branches in another window) + > Note: This command may appear unnecessarily specific, but it will help avoid publishing a version to Crates.io that doesn't match what you're currently releasing. (e.g., in the event that you've changed branches in another window) ``` cargo publish -p apollo-federation@"${APOLLO_ROUTER_RELEASE_VERSION}" && @@ -632,7 +630,7 @@ prep release branch created Make local edits to the newly rendered `CHANGELOG.md` entries to do some initial editoral. These things should have *ALWAYS* been resolved earlier in the review process of the PRs that introduced the changes, but they must be double checked: - + - There are no breaking changes. 
- Entries are in categories (e.g., Fixes vs Features) that make sense. - Titles stand alone and work without their descriptions. diff --git a/about.toml b/about.toml index 23c6c3ef58..2f4b83a563 100644 --- a/about.toml +++ b/about.toml @@ -30,6 +30,9 @@ workarounds = [ [ring] accepted = ["OpenSSL"] +[aws-lc-sys] +accepted = ["OpenSSL"] + [webpki.clarify] license = "ISC" [[webpki.clarify.files]] diff --git a/apollo-federation/Cargo.toml b/apollo-federation/Cargo.toml index 58dada163f..e9e08f7c6a 100644 --- a/apollo-federation/Cargo.toml +++ b/apollo-federation/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-federation" -version = "1.59.1" +version = "2.0.0-preview.4" authors = ["The Apollo GraphQL Contributors"] edition = "2021" description = "Apollo Federation" @@ -22,28 +22,36 @@ time = { version = "0.3.34", default-features = false, features = [ "local-offset", ] } derive_more = "0.99.17" +http.workspace = true hashbrown = "0.15.1" indexmap = { version = "2.2.6", features = ["serde"] } itertools = "0.13.0" +line-col = "0.2.1" multimap = "0.10.0" nom = "7.1.3" +nom_locate = "4.2.0" +once_cell = "1.19.0" petgraph = { version = "0.6.4", features = ["serde-1"] } +regex = "1.11.1" serde.workspace = true +serde_json.workspace = true serde_json_bytes.workspace = true strum = "0.26.0" strum_macros = "0.26.0" thiserror = "1.0" url = "2" +either = "1.13.0" tracing = "0.1.40" ron = { version = "0.8.1", optional = true } -either = "1.13.0" -regex = "1.11.1" +shape = "0.4.1" [dev-dependencies] hex.workspace = true insta.workspace = true sha1.workspace = true tempfile.workspace = true +pretty_assertions = "1.4.0" +rstest = "0.22.0" [[test]] name = "main" diff --git a/apollo-federation/cli/src/main.rs b/apollo-federation/cli/src/main.rs index c89afb689c..608999d43a 100644 --- a/apollo-federation/cli/src/main.rs +++ b/apollo-federation/cli/src/main.rs @@ -11,7 +11,11 @@ use apollo_federation::error::SingleFederationError; use apollo_federation::query_graph; use 
apollo_federation::query_plan::query_planner::QueryPlanner; use apollo_federation::query_plan::query_planner::QueryPlannerConfig; +use apollo_federation::sources::connect::expand::expand_connectors; +use apollo_federation::sources::connect::expand::ExpansionResult; use apollo_federation::subgraph; +use apollo_federation::ApiSchemaOptions; +use apollo_federation::Supergraph; use bench::BenchOutput; use clap::Parser; @@ -100,6 +104,19 @@ enum Command { #[command(flatten)] planner: QueryPlannerArgs, }, + + /// Expand connector-enabled supergraphs + Expand { + /// The path to the supergraph schema file, or `-` for stdin + supergraph_schema: PathBuf, + + /// The output directory for the extracted subgraph schemas + destination_dir: Option, + + /// An optional prefix to match against expanded subgraph names + #[arg(long)] + filter_prefix: Option, + }, } impl QueryPlannerArgs { @@ -148,6 +165,15 @@ fn main() -> ExitCode { operations_dir, planner, } => cmd_bench(&supergraph_schema, &operations_dir, planner), + Command::Expand { + supergraph_schema, + destination_dir, + filter_prefix, + } => cmd_expand( + &supergraph_schema, + destination_dir.as_ref(), + filter_prefix.as_deref(), + ), }; match result { Err(error) => { @@ -292,6 +318,75 @@ fn cmd_extract(file_path: &Path, dest: Option<&PathBuf>) -> Result<(), Federatio Ok(()) } +fn cmd_expand( + file_path: &Path, + dest: Option<&PathBuf>, + filter_prefix: Option<&str>, +) -> Result<(), FederationError> { + let original_supergraph = load_supergraph_file(file_path)?; + let ExpansionResult::Expanded { raw_sdl, .. } = expand_connectors( + &original_supergraph.schema.schema().serialize().to_string(), + &ApiSchemaOptions::default(), + )? + else { + return Err(FederationError::internal( + "supplied supergraph has no connectors to expand", + )); + }; + + // Validate the schema + // TODO: If expansion errors here due to bugs, it can be very hard to trace + // what specific portion of the expansion process failed. 
Work will need to be + // done to expansion to allow for returning an error type that carries the error + // and the expanded subgraph as seen until the error. + let expanded = Supergraph::new(&raw_sdl)?; + + let subgraphs = expanded.extract_subgraphs()?; + if let Some(dest) = dest { + fs::create_dir_all(dest).map_err(|_| SingleFederationError::Internal { + message: "Error: directory creation failed".into(), + })?; + for (name, subgraph) in subgraphs { + // Skip any files not matching the prefix, if specified + if let Some(prefix) = filter_prefix { + if !name.starts_with(prefix) { + continue; + } + } + + let subgraph_path = dest.join(format!("{}.graphql", name)); + fs::write(subgraph_path, subgraph.schema.schema().to_string()).map_err(|_| { + SingleFederationError::Internal { + message: "Error: file output failed".into(), + } + })?; + } + } else { + // Print out the schemas as YAML so that it can be piped into rover + // TODO: It would be nice to use rover's supergraph type here instead of manually printing + println!("federation_version: 2"); + println!("subgraphs:"); + for (name, subgraph) in subgraphs { + // Skip any files not matching the prefix, if specified + if let Some(prefix) = filter_prefix { + if !name.starts_with(prefix) { + continue; + } + } + + let schema_str = subgraph.schema.schema().serialize().initial_indent_level(4); + println!(" {name}:"); + println!(" routing_url: none"); + println!(" schema:"); + println!(" sdl: |"); + println!("{schema_str}"); + println!(); // newline + } + } + + Ok(()) +} + fn _cmd_bench( file_path: &Path, operations_dir: &PathBuf, diff --git a/apollo-federation/src/lib.rs b/apollo-federation/src/lib.rs index 4e8ba89640..052e743f3a 100644 --- a/apollo-federation/src/lib.rs +++ b/apollo-federation/src/lib.rs @@ -35,6 +35,7 @@ pub(crate) mod operation; pub mod query_graph; pub mod query_plan; pub mod schema; +pub mod sources; pub mod subgraph; pub(crate) mod supergraph; pub(crate) mod utils; diff --git 
a/apollo-federation/src/merge.rs b/apollo-federation/src/merge.rs index 3ebfbda997..ecd6b973ab 100644 --- a/apollo-federation/src/merge.rs +++ b/apollo-federation/src/merge.rs @@ -1,3 +1,5 @@ +mod fields; + use std::fmt::Debug; use std::fmt::Formatter; use std::iter; @@ -11,13 +13,16 @@ use apollo_compiler::ast::DirectiveLocation; use apollo_compiler::ast::EnumValueDefinition; use apollo_compiler::ast::FieldDefinition; use apollo_compiler::ast::NamedType; +use apollo_compiler::ast::Type; use apollo_compiler::ast::Value; +use apollo_compiler::collections::HashMap; use apollo_compiler::collections::IndexMap; use apollo_compiler::collections::IndexSet; use apollo_compiler::name; use apollo_compiler::schema::Component; use apollo_compiler::schema::EnumType; use apollo_compiler::schema::ExtendedType; +use apollo_compiler::schema::Implementers; use apollo_compiler::schema::InputObjectType; use apollo_compiler::schema::InputValueDefinition; use apollo_compiler::schema::InterfaceType; @@ -63,6 +68,7 @@ struct Merger { errors: Vec, composition_hints: Vec, needs_inaccessible: bool, + interface_objects: IndexSet, } pub struct MergeSuccess { @@ -125,6 +131,7 @@ impl Merger { composition_hints: Vec::new(), errors: Vec::new(), needs_inaccessible: false, + interface_objects: IndexSet::default(), } } @@ -134,17 +141,26 @@ impl Merger { .map(|(_, subgraph)| subgraph) .collect_vec(); subgraphs.sort_by(|s1, s2| s1.name.cmp(&s2.name)); - let mut subgraphs_and_enum_values: Vec<(&ValidFederationSubgraph, Name)> = Vec::new(); + let mut subgraphs_and_enum_values = Vec::new(); + let mut enum_values = IndexSet::default(); for subgraph in &subgraphs { - // TODO: Implement JS codebase's name transform (which always generates a valid GraphQL - // name and avoids collisions). 
- if let Ok(subgraph_name) = Name::new(&subgraph.name.to_uppercase()) { - subgraphs_and_enum_values.push((subgraph, subgraph_name)); + let enum_value = match EnumValue::new(&subgraph.name) { + Ok(enum_value) => enum_value, + Err(err) => { + self.errors.push(err); + continue; + } + }; + + // Ensure that enum values are unique after normalizing them + let enum_value = if enum_values.contains(&enum_value.0.to_string()) { + EnumValue::new(&format!("{}_{}", subgraph.name, enum_values.len())) + .expect("adding a suffix always works") } else { - self.errors.push(String::from( - "Subgraph name couldn't be transformed into valid GraphQL name", - )); - } + enum_value + }; + enum_values.insert(enum_value.0.to_string()); + subgraphs_and_enum_values.push((subgraph, enum_value)) } if !self.errors.is_empty() { return Err(MergeFailure { @@ -185,35 +201,35 @@ impl Merger { ExtendedType::Enum(value) => self.merge_enum_type( &mut supergraph.types, &relevant_directives, - subgraph_name.clone(), + subgraph_name, type_name.clone(), value, ), ExtendedType::InputObject(value) => self.merge_input_object_type( &mut supergraph.types, &relevant_directives, - subgraph_name.clone(), + subgraph_name, type_name.clone(), value, ), ExtendedType::Interface(value) => self.merge_interface_type( &mut supergraph.types, &relevant_directives, - subgraph_name.clone(), + subgraph_name, type_name.clone(), value, ), ExtendedType::Object(value) => self.merge_object_type( &mut supergraph.types, &relevant_directives, - subgraph_name.clone(), + subgraph_name, type_name.clone(), value, ), ExtendedType::Union(value) => self.merge_union_type( &mut supergraph.types, &relevant_directives, - subgraph_name.clone(), + subgraph_name, type_name.clone(), value, ), @@ -239,6 +255,9 @@ impl Merger { } } + let implementers_map = supergraph.implementers_map(); + self.add_interface_object_fields(&mut supergraph.types, implementers_map)?; + if self.needs_inaccessible { add_core_feature_inaccessible(&mut supergraph); } @@ -259,6 
+278,53 @@ impl Merger { } } + fn add_interface_object_fields( + &mut self, + types: &mut IndexMap, + implementers_map: HashMap, + ) -> Result<(), MergeFailure> { + for interface_object_name in self.interface_objects.iter() { + let Some(ExtendedType::Interface(intf_def)) = types.get(interface_object_name) else { + return Err(MergeFailure { + schema: None, + composition_hints: self.composition_hints.to_owned(), + errors: vec![format!("Interface {} not found", interface_object_name)], + }); + }; + let fields = intf_def.fields.clone(); + + if let Some(implementers) = implementers_map.get(interface_object_name) { + for implementer in implementers.iter() { + types.entry(implementer.clone()).and_modify(|f| { + if let ExtendedType::Object(obj) = f { + let obj = obj.make_mut(); + for (field_name, field_def) in fields.iter() { + let mut field_def = field_def.clone(); + let field_def = field_def.make_mut(); + field_def.directives = field_def + .directives + .iter() + .filter(|d| d.name != name!("join__field")) + .cloned() + .collect(); + field_def.directives.push(Node::new(Directive { + name: name!("join__field"), + arguments: vec![], + })); + + obj.fields + .entry(field_name.clone()) + .or_insert(field_def.clone().into()); + } + } + }); + } + }; + } + + Ok(()) + } + fn merge_descriptions(&mut self, merged: &mut Option, new: &Option) { match (&mut *merged, new) { (_, None) => {} @@ -298,7 +364,7 @@ impl Merger { &mut self, types: &mut IndexMap, metadata: &DirectiveNames, - subgraph_name: Name, + subgraph_name: &EnumValue, enum_name: NamedType, enum_type: &Node, ) { @@ -344,7 +410,7 @@ impl Merger { arguments: vec![ (Node::new(Argument { name: name!("graph"), - value: Node::new(Value::Enum(subgraph_name.clone())), + value: Node::new(Value::Enum(subgraph_name.to_name())), })), ], })); @@ -358,7 +424,7 @@ impl Merger { &mut self, types: &mut IndexMap, directive_names: &DirectiveNames, - subgraph_name: Name, + subgraph_name: &EnumValue, input_object_name: NamedType, 
input_object: &Node, ) { @@ -368,7 +434,7 @@ impl Merger { if let ExtendedType::InputObject(obj) = existing_type { let join_type_directives = - join_type_applied_directive(subgraph_name, iter::empty(), false); + join_type_applied_directive(subgraph_name.clone(), iter::empty(), false); let mutable_object = obj.make_mut(); mutable_object.directives.extend(join_type_directives); @@ -381,23 +447,42 @@ impl Merger { for (field_name, field) in input_object.fields.iter() { let existing_field = mutable_object.fields.entry(field_name.clone()); - match existing_field { - Vacant(_i) => { - // TODO warning - mismatch on input fields - } - Occupied(mut i) => { - self.add_inaccessible( - directive_names, - &mut i.get_mut().make_mut().directives, - &field.directives, - ); + let supergraph_field = match existing_field { + Vacant(i) => i.insert(Component::new(InputValueDefinition { + name: field.name.clone(), + description: field.description.clone(), + ty: field.ty.clone(), + default_value: field.default_value.clone(), + directives: Default::default(), + })), + Occupied(i) => { + i.into_mut() // merge_options(&i.get_mut().description, &field.description); // TODO check description // TODO check type // TODO check default value // TODO process directives } - } + }; + + self.add_inaccessible( + directive_names, + &mut supergraph_field.make_mut().directives, + &field.directives, + ); + + let join_field_directive = join_field_applied_directive( + subgraph_name, + None, + None, + false, + None, + Some(&field.ty), + ); + supergraph_field + .make_mut() + .directives + .push(Node::new(join_field_directive)); } } else { // TODO conflict on type @@ -408,7 +493,7 @@ impl Merger { &mut self, types: &mut IndexMap, directive_names: &DirectiveNames, - subgraph_name: Name, + subgraph_name: &EnumValue, interface_name: NamedType, interface: &Node, ) { @@ -419,7 +504,7 @@ impl Merger { if let ExtendedType::Interface(intf) = existing_type { let key_directives = 
interface.directives.get_all(&directive_names.key); let join_type_directives = - join_type_applied_directive(subgraph_name, key_directives, false); + join_type_applied_directive(subgraph_name.clone(), key_directives, false); let mutable_intf = intf.make_mut(); mutable_intf.directives.extend(join_type_directives); @@ -429,32 +514,69 @@ impl Merger { &interface.directives, ); + interface + .implements_interfaces + .iter() + .for_each(|intf_name| { + // IndexSet::insert deduplicates + mutable_intf.implements_interfaces.insert(intf_name.clone()); + let join_implements_directive = + join_implements_applied_directive(subgraph_name.clone(), intf_name); + mutable_intf.directives.push(join_implements_directive); + }); + for (field_name, field) in interface.fields.iter() { let existing_field = mutable_intf.fields.entry(field_name.clone()); - match existing_field { - Vacant(i) => { + let supergraph_field = match existing_field { + Occupied(f) => { + f.into_mut() + // TODO check description + // TODO check type + // TODO check default value + // TODO process directives + } + Vacant(f) => { // TODO warning mismatch missing fields - let f = i.insert(Component::new(FieldDefinition { + f.insert(Component::new(FieldDefinition { name: field.name.clone(), description: field.description.clone(), arguments: vec![], ty: field.ty.clone(), directives: Default::default(), - })); - - self.add_inaccessible( - directive_names, - &mut f.make_mut().directives, - &field.directives, - ); + })) } - Occupied(_i) => { - // TODO check description - // TODO check type - // TODO check default value - // TODO process directives - } - } + }; + + fields::merge_arguments( + field.arguments.iter(), + &mut supergraph_field.make_mut().arguments, + self, + directive_names, + ); + self.merge_descriptions( + &mut supergraph_field.make_mut().description, + &field.description, + ); + + self.add_inaccessible( + directive_names, + &mut supergraph_field.make_mut().directives, + &field.directives, + ); + + let 
join_field_directive = join_field_applied_directive( + subgraph_name, + None, + None, + false, + None, + Some(&field.ty), + ); + + supergraph_field + .make_mut() + .directives + .push(Node::new(join_field_directive)); } } else { // TODO conflict on type @@ -465,7 +587,7 @@ impl Merger { &mut self, types: &mut IndexMap, directive_names: &DirectiveNames, - subgraph_name: Name, + subgraph_name: &EnumValue, object_name: NamedType, object: &Node, ) { @@ -533,35 +655,12 @@ impl Merger { &field.directives, ); - for arg in field.arguments.iter() { - let arguments_to_merge = &mut supergraph_field.make_mut().arguments; - let argument_to_merge = arguments_to_merge - .iter_mut() - .find_map(|a| (a.name == arg.name).then(|| a.make_mut())); - - if let Some(argument) = argument_to_merge { - self.add_inaccessible( - directive_names, - &mut argument.directives, - &arg.directives, - ); - } else { - let mut argument = InputValueDefinition { - name: arg.name.clone(), - description: arg.description.clone(), - directives: Default::default(), - ty: arg.ty.clone(), - default_value: arg.default_value.clone(), - }; - - self.add_inaccessible( - directive_names, - &mut argument.directives, - &arg.directives, - ); - arguments_to_merge.push(argument.into()); - }; - } + fields::merge_arguments( + field.arguments.iter(), + &mut supergraph_field.make_mut().arguments, + self, + directive_names, + ); let requires_directive_option = field .directives @@ -594,11 +693,12 @@ impl Merger { .is_some(); let join_field_directive = join_field_applied_directive( - subgraph_name.clone(), + subgraph_name, requires_directive_option, provides_directive_option, external_field, overrides_directive_option, + Some(&field.ty), ); supergraph_field @@ -610,11 +710,106 @@ impl Merger { // https://github.com/apollographql/federation/blob/0d8a88585d901dff6844fdce1146a4539dec48df/composition-js/src/merging/merge.ts#L1648 } } else if let ExtendedType::Interface(intf) = existing_type { - // TODO support interface object + 
self.interface_objects.insert(intf.name.clone()); + let key_directives = object.directives.get_all(&directive_names.key); let join_type_directives = - join_type_applied_directive(subgraph_name, key_directives, true); - intf.make_mut().directives.extend(join_type_directives); + join_type_applied_directive(subgraph_name.clone(), key_directives, true); + let mutable_object = intf.make_mut(); + mutable_object.directives.extend(join_type_directives); + self.merge_descriptions(&mut mutable_object.description, &object.description); + self.add_inaccessible( + directive_names, + &mut mutable_object.directives, + &object.directives, + ); + + for (field_name, field) in object.fields.iter() { + // skip federation built-in queries + if field_name == "_service" || field_name == "_entities" { + continue; + } + + let existing_field = mutable_object.fields.entry(field_name.clone()); + let supergraph_field = match existing_field { + Occupied(f) => { + // check description + // check type + // check args + f.into_mut() + } + Vacant(f) => f.insert(Component::new(FieldDefinition { + name: field.name.clone(), + description: field.description.clone(), + arguments: vec![], + directives: Default::default(), + ty: field.ty.clone(), + })), + }; + self.merge_descriptions( + &mut supergraph_field.make_mut().description, + &field.description, + ); + + self.add_inaccessible( + directive_names, + &mut supergraph_field.make_mut().directives, + &field.directives, + ); + + fields::merge_arguments( + field.arguments.iter(), + &mut supergraph_field.make_mut().arguments, + self, + directive_names, + ); + let requires_directive_option = field + .directives + .get_all(&directive_names.requires) + .next() + .and_then(|p| directive_string_arg_value(p, &FEDERATION_FIELDS_ARGUMENT_NAME)); + + let provides_directive_option = field + .directives + .get_all(&directive_names.provides) + .next() + .and_then(|p| directive_string_arg_value(p, &FEDERATION_FIELDS_ARGUMENT_NAME)); + + let overrides_directive_option = 
field + .directives + .get_all(&directive_names.r#override) + .next() + .and_then(|p| { + let overrides_from = + directive_string_arg_value(p, &FEDERATION_FROM_ARGUMENT_NAME); + let overrides_label = + directive_string_arg_value(p, &FEDERATION_OVERRIDE_LABEL_ARGUMENT_NAME); + overrides_from.map(|from| (from, overrides_label)) + }); + + let external_field = field + .directives + .get_all(&directive_names.external) + .next() + .is_some(); + + let join_field_directive = join_field_applied_directive( + subgraph_name, + requires_directive_option, + provides_directive_option, + external_field, + overrides_directive_option, + Some(&field.ty), + ); + + supergraph_field + .make_mut() + .directives + .push(Node::new(join_field_directive)); + + // TODO: implement needsJoinField to avoid adding join__field when unnecessary + // https://github.com/apollographql/federation/blob/0d8a88585d901dff6844fdce1146a4539dec48df/composition-js/src/merging/merge.ts#L1648 + } }; // TODO merge fields } @@ -623,7 +818,7 @@ impl Merger { &mut self, types: &mut IndexMap, directive_names: &DirectiveNames, - subgraph_name: Name, + subgraph_name: &EnumValue, union_name: NamedType, union: &Node, ) { @@ -650,7 +845,7 @@ impl Merger { arguments: vec![ Node::new(Argument { name: name!("graph"), - value: Node::new(Value::Enum(subgraph_name.clone())), + value: Node::new(Value::Enum(subgraph_name.to_name())), }), Node::new(Argument { name: name!("member"), @@ -666,7 +861,7 @@ impl Merger { &mut self, types: &mut IndexMap, directive_names: &DirectiveNames, - subgraph_name: Name, + subgraph_name: EnumValue, scalar_name: NamedType, ty: &Node, ) { @@ -676,7 +871,7 @@ impl Merger { if let ExtendedType::Scalar(s) = existing_type { let join_type_directives = - join_type_applied_directive(subgraph_name.clone(), iter::empty(), false); + join_type_applied_directive(subgraph_name, iter::empty(), false); s.make_mut().directives.extend(join_type_directives); self.add_inaccessible( directive_names, @@ -927,7 +1122,7 @@ 
fn copy_union_type(union_name: Name, description: Option>) -> Extended } fn join_type_applied_directive<'a>( - subgraph_name: Name, + subgraph_name: EnumValue, key_directives: impl Iterator> + Sized, is_interface_object: bool, ) -> Vec> { @@ -935,7 +1130,7 @@ fn join_type_applied_directive<'a>( name: name!("join__type"), arguments: vec![Node::new(Argument { name: name!("graph"), - value: Node::new(Value::Enum(subgraph_name)), + value: Node::new(Value::Enum(subgraph_name.into())), })], }; if is_interface_object { @@ -978,7 +1173,7 @@ fn join_type_applied_directive<'a>( } fn join_implements_applied_directive( - subgraph_name: Name, + subgraph_name: EnumValue, intf_name: &Name, ) -> Component { Component::new(Directive { @@ -986,7 +1181,7 @@ fn join_implements_applied_directive( arguments: vec![ Node::new(Argument { name: name!("graph"), - value: Node::new(Value::Enum(subgraph_name)), + value: Node::new(Value::Enum(subgraph_name.into())), }), Node::new(Argument { name: name!("interface"), @@ -1140,7 +1335,7 @@ fn link_purpose_enum_type() -> (Name, EnumType) { // TODO join spec fn add_core_feature_join( supergraph: &mut Schema, - subgraphs_and_enum_values: &Vec<(&ValidFederationSubgraph, Name)>, + subgraphs_and_enum_values: &Vec<(&ValidFederationSubgraph, EnumValue)>, ) { // @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) supergraph @@ -1208,6 +1403,24 @@ fn add_core_feature_join( Node::new(join_enum_value_directive_definition), ); + // scalar join__DirectiveArguments + let join_directive_arguments_name = name!("join__DirectiveArguments"); + let join_directive_arguments_scalar = ExtendedType::Scalar(Node::new(ScalarType { + directives: Default::default(), + name: join_directive_arguments_name.clone(), + description: None, + })); + supergraph.types.insert( + join_directive_arguments_name, + join_directive_arguments_scalar, + ); + + let join_directive_directive_definition = join_directive_directive_definition(); + supergraph.directive_definitions.insert( 
+ join_directive_directive_definition.name.clone(), + Node::new(join_directive_directive_definition), + ); + let (name, join_graph_enum_type) = join_graph_enum_type(subgraphs_and_enum_values); supergraph.types.insert(name, join_graph_enum_type.into()); } @@ -1229,6 +1442,44 @@ fn join_enum_value_directive_definition() -> DirectiveDefinition { } } +/// directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION +fn join_directive_directive_definition() -> DirectiveDefinition { + DirectiveDefinition { + name: name!("join__directive"), + description: None, + arguments: vec![ + Node::new(InputValueDefinition { + name: name!("graphs"), + description: None, + directives: Default::default(), + ty: ty!([join__Graph!]).into(), + default_value: None, + }), + Node::new(InputValueDefinition { + name: name!("name"), + description: None, + directives: Default::default(), + ty: ty!(String!).into(), + default_value: None, + }), + Node::new(InputValueDefinition { + name: name!("args"), + description: None, + directives: Default::default(), + ty: ty!(join__DirectiveArguments!).into(), + default_value: None, + }), + ], + locations: vec![ + DirectiveLocation::Schema, + DirectiveLocation::Object, + DirectiveLocation::Interface, + DirectiveLocation::FieldDefinition, + ], + repeatable: true, + } +} + /// directive @field( /// graph: Graph, /// requires: FieldSet, @@ -1309,17 +1560,18 @@ fn join_field_directive_definition() -> DirectiveDefinition { } fn join_field_applied_directive( - subgraph_name: Name, + subgraph_name: &EnumValue, requires: Option<&str>, provides: Option<&str>, external: bool, overrides: Option<(&str, Option<&str>)>, // from, label + r#type: Option<&Type>, ) -> Directive { let mut join_field_directive = Directive { name: name!("join__field"), arguments: vec![Node::new(Argument { name: name!("graph"), - value: Node::new(Value::Enum(subgraph_name)), + value: 
Node::new(Value::Enum(subgraph_name.to_name())), })], }; if let Some(required_fields) = requires { @@ -1352,6 +1604,12 @@ fn join_field_applied_directive( })); } } + if let Some(r#type) = r#type { + join_field_directive.arguments.push(Node::new(Argument { + name: name!("type"), + value: r#type.to_string().into(), + })); + } join_field_directive } @@ -1498,7 +1756,7 @@ fn join_union_member_directive_definition() -> DirectiveDefinition { /// enum Graph fn join_graph_enum_type( - subgraphs_and_enum_values: &Vec<(&ValidFederationSubgraph, Name)>, + subgraphs_and_enum_values: &Vec<(&ValidFederationSubgraph, EnumValue)>, ) -> (Name, EnumType) { let join_graph_enum_name = name!("join__Graph"); let mut join_graph_enum_type = EnumType { @@ -1524,7 +1782,7 @@ fn join_graph_enum_type( let graph = EnumValueDefinition { description: None, directives: DirectiveList(vec![Node::new(join_graph_applied_directive)]), - value: subgraph_name.clone(), + value: subgraph_name.to_name(), }; join_graph_enum_type .values @@ -1533,6 +1791,87 @@ fn join_graph_enum_type( (join_graph_enum_name, join_graph_enum_type) } +/// Represents a valid enum value in GraphQL, used for building `join__Graph`. +/// +/// TODO: Put this in `join_spec_definition.rs` when we convert to using that module. +#[derive(Clone, Debug)] +struct EnumValue(Name); + +impl EnumValue { + fn new(raw: &str) -> Result { + let prefix = if raw.starts_with(char::is_numeric) { + Some('_') + } else { + None + }; + let name = prefix + .into_iter() + .chain(raw.chars()) + .map(|c| match c { + 'a'..='z' => c.to_ascii_uppercase(), + 'A'..='Z' | '0'..='9' => c, + _ => '_', + }) + .collect::(); + Name::new(&name) + .map(Self) + .map_err(|_| format!("Failed to transform {raw} into a valid GraphQL name. 
Got {name}")) + } + fn to_name(&self) -> Name { + self.0.clone() + } + + #[cfg(test)] + fn as_str(&self) -> &str { + self.0.as_str() + } +} + +impl From for Name { + fn from(ev: EnumValue) -> Self { + ev.0 + } +} + +#[cfg(test)] +mod test_enum_value { + #[test] + fn basic() { + let ev = super::EnumValue::new("subgraph").unwrap(); + assert_eq!(ev.as_str(), "SUBGRAPH"); + } + + #[test] + fn with_underscores() { + let ev = super::EnumValue::new("a_subgraph").unwrap(); + assert_eq!(ev.as_str(), "A_SUBGRAPH"); + } + + #[test] + fn with_hyphens() { + let ev = super::EnumValue::new("a-subgraph").unwrap(); + assert_eq!(ev.as_str(), "A_SUBGRAPH"); + } + + #[test] + fn special_symbols() { + let ev = super::EnumValue::new("a$ubgraph").unwrap(); + assert_eq!(ev.as_str(), "A_UBGRAPH"); + } + + #[test] + fn digit_first_char() { + let ev = super::EnumValue::new("1subgraph").unwrap(); + assert_eq!(ev.as_str(), "_1SUBGRAPH"); + } + + #[test] + fn digit_last_char() { + let ev = super::EnumValue::new("subgraph_1").unwrap(); + assert_eq!(ev.as_str(), "SUBGRAPH_1"); + } +} + fn add_core_feature_inaccessible(supergraph: &mut Schema) { // @link(url: "https://specs.apollo.dev/inaccessible/v0.2") let spec = InaccessibleSpecDefinition::new(Version { major: 0, minor: 2 }); @@ -1588,145 +1927,4 @@ fn merge_directive( } #[cfg(test)] -mod tests { - use apollo_compiler::Schema; - use insta::assert_snapshot; - - use crate::merge::merge_federation_subgraphs; - use crate::schema::ValidFederationSchema; - use crate::ValidFederationSubgraph; - use crate::ValidFederationSubgraphs; - - #[test] - fn test_steel_thread() { - let one_sdl = - include_str!("./sources/connect/expand/merge/connector_Query_users_0.graphql"); - let two_sdl = include_str!("./sources/connect/expand/merge/connector_Query_user_0.graphql"); - let three_sdl = include_str!("./sources/connect/expand/merge/connector_User_d_1.graphql"); - let graphql_sdl = include_str!("./sources/connect/expand/merge/graphql.graphql"); - - let mut 
subgraphs = ValidFederationSubgraphs::new(); - subgraphs - .add(ValidFederationSubgraph { - name: "connector_Query_users_0".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(one_sdl, "./connector_Query_users_0.graphql") - .unwrap(), - ) - .unwrap(), - }) - .unwrap(); - subgraphs - .add(ValidFederationSubgraph { - name: "connector_Query_user_0".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(two_sdl, "./connector_Query_user_0.graphql") - .unwrap(), - ) - .unwrap(), - }) - .unwrap(); - subgraphs - .add(ValidFederationSubgraph { - name: "connector_User_d_1".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(three_sdl, "./connector_User_d_1.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - subgraphs - .add(ValidFederationSubgraph { - name: "graphql".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(graphql_sdl, "./graphql.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - - let result = merge_federation_subgraphs(subgraphs).unwrap(); - - let schema = result.schema.into_inner(); - let validation = schema.clone().validate(); - assert!(validation.is_ok(), "{:?}", validation); - - assert_snapshot!(schema.serialize()); - } - - #[test] - fn test_basic() { - let one_sdl = include_str!("./sources/connect/expand/merge/basic_1.graphql"); - let two_sdl = include_str!("./sources/connect/expand/merge/basic_2.graphql"); - - let mut subgraphs = ValidFederationSubgraphs::new(); - subgraphs - .add(ValidFederationSubgraph { - name: "basic_1".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(one_sdl, "./basic_1.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - subgraphs - .add(ValidFederationSubgraph { - name: "basic_2".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - 
Schema::parse_and_validate(two_sdl, "./basic_2.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - - let result = merge_federation_subgraphs(subgraphs).unwrap(); - - let schema = result.schema.into_inner(); - let validation = schema.clone().validate(); - assert!(validation.is_ok(), "{:?}", validation); - - assert_snapshot!(schema.serialize()); - } - - #[test] - fn test_inaccessible() { - let one_sdl = include_str!("./sources/connect/expand/merge/inaccessible.graphql"); - let two_sdl = include_str!("./sources/connect/expand/merge/inaccessible_2.graphql"); - - let mut subgraphs = ValidFederationSubgraphs::new(); - subgraphs - .add(ValidFederationSubgraph { - name: "inaccessible".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(one_sdl, "./inaccessible.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - subgraphs - .add(ValidFederationSubgraph { - name: "inaccessible_2".to_string(), - url: "".to_string(), - schema: ValidFederationSchema::new( - Schema::parse_and_validate(two_sdl, "./inaccessible_2.graphql").unwrap(), - ) - .unwrap(), - }) - .unwrap(); - - let result = merge_federation_subgraphs(subgraphs).unwrap(); - - let schema = result.schema.into_inner(); - let validation = schema.clone().validate(); - assert!(validation.is_ok(), "{:?}", validation); - - assert_snapshot!(schema.serialize()); - } -} +mod tests; diff --git a/apollo-federation/src/merge/fields.rs b/apollo-federation/src/merge/fields.rs new file mode 100644 index 0000000000..d6fb09d394 --- /dev/null +++ b/apollo-federation/src/merge/fields.rs @@ -0,0 +1,35 @@ +use std::slice::Iter; + +use apollo_compiler::ast::InputValueDefinition; +use apollo_compiler::Node; + +use crate::merge::DirectiveNames; +use crate::merge::Merger; + +pub(super) fn merge_arguments( + arguments: Iter>, + arguments_to_merge: &mut Vec>, + merger: &mut Merger, + directive_names: &DirectiveNames, +) { + for arg in arguments { + let argument_to_merge = arguments_to_merge 
+ .iter_mut() + .find_map(|a| (a.name == arg.name).then(|| a.make_mut())); + + if let Some(argument) = argument_to_merge { + merger.add_inaccessible(directive_names, &mut argument.directives, &arg.directives); + } else { + let mut argument = InputValueDefinition { + name: arg.name.clone(), + description: arg.description.clone(), + directives: Default::default(), + ty: arg.ty.clone(), + default_value: arg.default_value.clone(), + }; + + merger.add_inaccessible(directive_names, &mut argument.directives, &arg.directives); + arguments_to_merge.push(argument.into()); + } + } +} diff --git a/apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__basic.snap similarity index 59% rename from apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap rename to apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__basic.snap index 1b63df9134..96fa85b993 100644 --- a/apollo-federation/src/snapshots/apollo_federation__merge__tests__basic.snap +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__basic.snap @@ -1,9 +1,10 @@ --- -source: apollo-federation/src/merge.rs +source: apollo-federation/src/merge/tests.rs expression: schema.serialize() --- schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { query: Query + mutation: Mutation } directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA @@ -20,6 +21,8 @@ directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + enum link__Purpose { """ SECURITY features provide metadata necessary to securely resolve fields. 
@@ -33,29 +36,34 @@ scalar link__Import scalar join__FieldSet +scalar join__DirectiveArguments + enum join__Graph { BASIC_1 @join__graph(name: "basic_1", url: "") BASIC_2 @join__graph(name: "basic_2", url: "") } type Query @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { - i: I @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) - u: U @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) - f(x: ID, y: YInput): T @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + i: I @join__field(graph: BASIC_1, type: "I") @join__field(graph: BASIC_2, type: "I") + u: U @join__field(graph: BASIC_1, type: "U") @join__field(graph: BASIC_2, type: "U") + f(x: ID, y: YInput): T @join__field(graph: BASIC_1, type: "T") @join__field(graph: BASIC_2, type: "T") } interface I @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { - id: ID! + id: ID! @join__field(graph: BASIC_1, type: "ID!") @join__field(graph: BASIC_2, type: "ID!") + f(x: ID, y: YInput): T @join__field(graph: BASIC_2, type: "T") } type A implements I @join__type(graph: BASIC_1) @join__implements(graph: BASIC_1, interface: "I") @join__type(graph: BASIC_2) @join__implements(graph: BASIC_2, interface: "I") { - id: ID! @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) - a: S @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + id: ID! @join__field(graph: BASIC_1, type: "ID!") @join__field(graph: BASIC_2, type: "ID!") + a: S @join__field(graph: BASIC_1, type: "S") @join__field(graph: BASIC_2, type: "S") + f(x: ID, y: YInput): T @join__field(graph: BASIC_2, type: "T") } type B implements I @join__type(graph: BASIC_1) @join__implements(graph: BASIC_1, interface: "I") @join__type(graph: BASIC_2) @join__implements(graph: BASIC_2, interface: "I") { - id: ID! @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) - b: E @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + id: ID! 
@join__field(graph: BASIC_1, type: "ID!") @join__field(graph: BASIC_2, type: "ID!") + b: E @join__field(graph: BASIC_1, type: "E") @join__field(graph: BASIC_2, type: "E") + f(x: ID, y: YInput): T @join__field(graph: BASIC_2, type: "T") } union U @join__type(graph: BASIC_1) @join__unionMember(graph: BASIC_1, member: "A") @join__unionMember(graph: BASIC_1, member: "B") @join__type(graph: BASIC_2) @join__unionMember(graph: BASIC_2, member: "A") @join__unionMember(graph: BASIC_2, member: "B") = A | B @@ -68,14 +76,23 @@ enum E @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { } type T @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { - x: ID @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) - y: Y @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + x: ID @join__field(graph: BASIC_1, type: "ID") @join__field(graph: BASIC_2, type: "ID") + y: Y @join__field(graph: BASIC_1, type: "Y") @join__field(graph: BASIC_2, type: "Y") } type Y @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { - z: ID @join__field(graph: BASIC_1) @join__field(graph: BASIC_2) + z: ID @join__field(graph: BASIC_1, type: "ID") @join__field(graph: BASIC_2, type: "ID") } input YInput @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { - z: ID + z: ID @join__field(graph: BASIC_1, type: "ID") @join__field(graph: BASIC_2, type: "ID") +} + +type Mutation @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + m: M @join__field(graph: BASIC_1, type: "M") + m2(x: ID, y: YInput): M @join__field(graph: BASIC_2, type: "M") +} + +type M @join__type(graph: BASIC_1) @join__type(graph: BASIC_2) { + n: String @join__field(graph: BASIC_1, type: "String") @join__field(graph: BASIC_2, type: "String") } diff --git a/apollo-federation/src/snapshots/apollo_federation__merge__tests__inaccessible.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__inaccessible.snap similarity index 74% rename from 
apollo-federation/src/snapshots/apollo_federation__merge__tests__inaccessible.snap rename to apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__inaccessible.snap index 29501545a7..58d971dc80 100644 --- a/apollo-federation/src/snapshots/apollo_federation__merge__tests__inaccessible.snap +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__inaccessible.snap @@ -20,6 +20,8 @@ directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION enum link__Purpose { @@ -35,6 +37,8 @@ scalar link__Import scalar join__FieldSet +scalar join__DirectiveArguments + enum join__Graph { INACCESSIBLE @join__graph(name: "inaccessible", url: "") INACCESSIBLE_2 @join__graph(name: "inaccessible_2", url: "") @@ -43,19 +47,19 @@ enum join__Graph { type Query @join__type(graph: INACCESSIBLE) @join__type(graph: INACCESSIBLE_2) { a( input: Input @inaccessible, - ): A @join__field(graph: INACCESSIBLE) - b: B @inaccessible @join__field(graph: INACCESSIBLE) - as: [A] @inaccessible @join__field(graph: INACCESSIBLE_2) + ): A @join__field(graph: INACCESSIBLE, type: "A") + b: B @inaccessible @join__field(graph: INACCESSIBLE, type: "B") + as: [A] @inaccessible @join__field(graph: INACCESSIBLE_2, type: "[A]") } type A @join__type(graph: INACCESSIBLE, key: "id") @join__type(graph: INACCESSIBLE_2, key: "id") { - id: ID! @join__field(graph: INACCESSIBLE) @join__field(graph: INACCESSIBLE_2) - c: Int @inaccessible @join__field(graph: INACCESSIBLE) @join__field(graph: INACCESSIBLE_2) - d: Enum @inaccessible @join__field(graph: INACCESSIBLE) + id: ID! 
@join__field(graph: INACCESSIBLE, type: "ID!") @join__field(graph: INACCESSIBLE_2, type: "ID!") + c: Int @inaccessible @join__field(graph: INACCESSIBLE, type: "Int") @join__field(graph: INACCESSIBLE_2, type: "Int") + d: Enum @inaccessible @join__field(graph: INACCESSIBLE, type: "Enum") } type B implements Interface @join__type(graph: INACCESSIBLE) @inaccessible @join__implements(graph: INACCESSIBLE, interface: "Interface") { - b: Scalar @join__field(graph: INACCESSIBLE) + b: Scalar @join__field(graph: INACCESSIBLE, type: "Scalar") } enum Enum @join__type(graph: INACCESSIBLE) @inaccessible { @@ -65,14 +69,14 @@ enum Enum @join__type(graph: INACCESSIBLE) @inaccessible { } input Input @join__type(graph: INACCESSIBLE) @inaccessible { - a: Int @inaccessible - b: String + a: Int @inaccessible @join__field(graph: INACCESSIBLE, type: "Int") + b: String @join__field(graph: INACCESSIBLE, type: "String") } scalar Scalar @join__type(graph: INACCESSIBLE) @inaccessible interface Interface @join__type(graph: INACCESSIBLE) @inaccessible { - b: Scalar + b: Scalar @inaccessible @join__field(graph: INACCESSIBLE, type: "Scalar") } union Union @join__type(graph: INACCESSIBLE) @inaccessible @join__unionMember(graph: INACCESSIBLE, member: "A") @join__unionMember(graph: INACCESSIBLE, member: "B") = A | B diff --git a/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__input_types.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__input_types.snap new file mode 100644 index 0000000000..edf9262249 --- /dev/null +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__input_types.snap @@ -0,0 +1,60 @@ +--- +source: apollo-federation/src/merge/tests.rs +expression: schema.serialize() +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on 
SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE @join__graph(name: "one", url: "") +} + +type Query @join__type(graph: ONE) { + a(input: AInput!): A @join__field(graph: ONE, type: "A") +} + +type A @join__type(graph: ONE) { + id: ID! @join__field(graph: ONE, type: "ID!") + b: String @join__field(graph: ONE, type: "String") +} + +input AInput @join__type(graph: ONE) { + id: ID! @join__field(graph: ONE, type: "ID!") + b: BInput @join__field(graph: ONE, type: "BInput") +} + +input BInput @join__type(graph: ONE) { + id: ID! 
@join__field(graph: ONE, type: "ID!") +} diff --git a/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_implementing_interface.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_implementing_interface.snap new file mode 100644 index 0000000000..0bd995e561 --- /dev/null +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_implementing_interface.snap @@ -0,0 +1,67 @@ +--- +source: apollo-federation/src/merge/tests.rs +expression: schema.serialize() +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE @join__graph(name: "one", url: "") +} + +type Query @join__type(graph: ONE) { + i: I @join__field(graph: ONE, type: "I") +} + +interface Node @join__type(graph: ONE) { + id: ID! @join__field(graph: ONE, type: "ID!") +} + +interface I implements Node @join__type(graph: ONE) @join__implements(graph: ONE, interface: "Node") { + id: ID! @join__field(graph: ONE, type: "ID!") + i: String @join__field(graph: ONE, type: "String") +} + +type A implements I & Node @join__type(graph: ONE) @join__implements(graph: ONE, interface: "I") @join__implements(graph: ONE, interface: "Node") { + id: ID! @join__field(graph: ONE, type: "ID!") + i: String @join__field(graph: ONE, type: "String") + a: String @join__field(graph: ONE, type: "String") +} + +type B implements I & Node @join__type(graph: ONE) @join__implements(graph: ONE, interface: "I") @join__implements(graph: ONE, interface: "Node") { + id: ID! 
@join__field(graph: ONE, type: "ID!") + i: String @join__field(graph: ONE, type: "String") + b: String @join__field(graph: ONE, type: "String") +} diff --git a/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_object.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_object.snap new file mode 100644 index 0000000000..affaa4c72c --- /dev/null +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__interface_object.snap @@ -0,0 +1,74 @@ +--- +source: apollo-federation/src/merge/tests.rs +expression: schema.serialize() +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + INTERFACE_OBJECT_1 @join__graph(name: "interface_object_1", url: "") + INTERFACE_OBJECT_2 @join__graph(name: "interface_object_2", url: "") + INTERFACE_OBJECT_3 @join__graph(name: "interface_object_3", url: "") +} + +interface Itf @join__type(graph: INTERFACE_OBJECT_1, key: "id") @join__type(graph: INTERFACE_OBJECT_2, isInterfaceObject: true, key: "id") @join__type(graph: INTERFACE_OBJECT_3, isInterfaceObject: true, key: "id") { + id: ID! @join__field(graph: INTERFACE_OBJECT_1, type: "ID!") @join__field(graph: INTERFACE_OBJECT_2, type: "ID!") @join__field(graph: INTERFACE_OBJECT_3, type: "ID!") + c: Int! @join__field(graph: INTERFACE_OBJECT_2, type: "Int!") @join__field(graph: INTERFACE_OBJECT_3, type: "Int!") + d: Int! @join__field(graph: INTERFACE_OBJECT_3, type: "Int!") +} + +type T1 implements Itf @join__type(graph: INTERFACE_OBJECT_1, key: "id") @join__implements(graph: INTERFACE_OBJECT_1, interface: "Itf") { + id: ID! @join__field(graph: INTERFACE_OBJECT_1, type: "ID!") + a: String @join__field(graph: INTERFACE_OBJECT_1, type: "String") + c: Int! @join__field + d: Int! @join__field +} + +type T2 implements Itf @join__type(graph: INTERFACE_OBJECT_1, key: "id") @join__implements(graph: INTERFACE_OBJECT_1, interface: "Itf") { + id: ID! @join__field(graph: INTERFACE_OBJECT_1, type: "ID!") + b: String @join__field(graph: INTERFACE_OBJECT_1, type: "String") + c: Int! @join__field + d: Int! 
@join__field +} + +type Query @join__type(graph: INTERFACE_OBJECT_1) @join__type(graph: INTERFACE_OBJECT_2) @join__type(graph: INTERFACE_OBJECT_3) { + itfs: [Itf] @join__field(graph: INTERFACE_OBJECT_2, type: "[Itf]") + itf(id: ID!): Itf @join__field(graph: INTERFACE_OBJECT_3, type: "Itf") + itf2(id: ID!): Itf2 @join__field(graph: INTERFACE_OBJECT_3, type: "Itf2") +} + +interface Itf2 @join__type(graph: INTERFACE_OBJECT_3, isInterfaceObject: true, key: "id") { + id: ID! @join__field(graph: INTERFACE_OBJECT_3, type: "ID!") +} diff --git a/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__steel_thread.snap similarity index 77% rename from apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap rename to apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__steel_thread.snap index 54ee822eea..9bd6f521a2 100644 --- a/apollo-federation/src/snapshots/apollo_federation__merge__tests__steel_thread.snap +++ b/apollo-federation/src/merge/snapshots/apollo_federation__merge__tests__steel_thread.snap @@ -20,6 +20,8 @@ directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION enum link__Purpose { @@ -35,6 +37,8 @@ scalar link__Import scalar join__FieldSet +scalar join__DirectiveArguments + enum join__Graph { CONNECTOR_QUERY_USER_0 @join__graph(name: "connector_Query_user_0", url: "") CONNECTOR_QUERY_USERS_0 @join__graph(name: "connector_Query_users_0", url: "") @@ -43,15 +47,15 @@ enum join__Graph { } type User @join__type(graph: CONNECTOR_QUERY_USER_0, key: "id") @join__type(graph: CONNECTOR_QUERY_USERS_0) @join__type(graph: CONNECTOR_USER_D_1, key: "__typename") @join__type(graph: GRAPHQL, key: "id") { - id: ID! @join__field(graph: CONNECTOR_QUERY_USER_0) @join__field(graph: CONNECTOR_QUERY_USERS_0) @join__field(graph: GRAPHQL) - a: String @join__field(graph: CONNECTOR_QUERY_USER_0) @join__field(graph: CONNECTOR_QUERY_USERS_0) - b: String @join__field(graph: CONNECTOR_QUERY_USER_0) - c: String @join__field(graph: CONNECTOR_USER_D_1, external: true) @join__field(graph: GRAPHQL) - d: String @join__field(graph: CONNECTOR_USER_D_1, requires: "c") + id: ID! 
@join__field(graph: CONNECTOR_QUERY_USER_0, type: "ID!") @join__field(graph: CONNECTOR_QUERY_USERS_0, type: "ID!") @join__field(graph: GRAPHQL, type: "ID!") + a: String @join__field(graph: CONNECTOR_QUERY_USER_0, type: "String") @join__field(graph: CONNECTOR_QUERY_USERS_0, type: "String") + b: String @join__field(graph: CONNECTOR_QUERY_USER_0, type: "String") + c: String @join__field(graph: CONNECTOR_USER_D_1, external: true, type: "String") @join__field(graph: GRAPHQL, type: "String") + d: String @join__field(graph: CONNECTOR_USER_D_1, requires: "c", type: "String") } type Query @join__type(graph: CONNECTOR_QUERY_USER_0) @join__type(graph: CONNECTOR_QUERY_USERS_0) @join__type(graph: CONNECTOR_USER_D_1) @join__type(graph: GRAPHQL) { - user(id: ID!): User @join__field(graph: CONNECTOR_QUERY_USER_0) - users(limit: Int): [User] @join__field(graph: CONNECTOR_QUERY_USERS_0) - _: ID @inaccessible @join__field(graph: CONNECTOR_USER_D_1) + user(id: ID!): User @join__field(graph: CONNECTOR_QUERY_USER_0, type: "User") + users(limit: Int): [User] @join__field(graph: CONNECTOR_QUERY_USERS_0, type: "[User]") + _: ID @inaccessible @join__field(graph: CONNECTOR_USER_D_1, type: "ID") } diff --git a/apollo-federation/src/merge/testdata/input_types/one.graphql b/apollo-federation/src/merge/testdata/input_types/one.graphql new file mode 100644 index 0000000000..8b2b4ffd12 --- /dev/null +++ b/apollo-federation/src/merge/testdata/input_types/one.graphql @@ -0,0 +1,107 @@ +schema { + query: Query +} + +extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/federation/v2.9") + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +directive @federation__key( + fields: federation__FieldSet! + resolvable: Boolean = true +) repeatable on OBJECT | INTERFACE + +directive @federation__requires( + fields: federation__FieldSet! 
+) on FIELD_DEFINITION + +directive @federation__provides( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__external(reason: String) on OBJECT | FIELD_DEFINITION + +directive @federation__tag( + name: String! +) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @federation__extends on OBJECT | INTERFACE + +directive @federation__shareable on OBJECT | FIELD_DEFINITION + +directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @federation__override( + from: String! + label: String +) on FIELD_DEFINITION + +directive @federation__composeDirective(name: String) repeatable on SCHEMA + +directive @federation__interfaceObject on OBJECT + +directive @federation__authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__requiresScopes( + scopes: [[federation__Scope!]!]! +) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__cost( + weight: Int! +) on ARGUMENT_DEFINITION | ENUM | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | OBJECT | SCALAR + +directive @federation__listSize( + assumedSize: Int + slicingArguments: [String!] + sizedFields: [String!] + requireOneSlicingArgument: Boolean = true +) on FIELD_DEFINITION + +scalar link__Import + +enum link__Purpose { + """ + \`SECURITY\` features provide metadata necessary to securely resolve fields. + """ + SECURITY + """ + \`EXECUTION\` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +scalar federation__FieldSet + +scalar federation__Scope + +scalar _Any + +type _Service { + sdl: String +} + +type Query { + _service: _Service! + a(input: AInput!): A +} + +type A { + id: ID! + b: String +} + +input AInput { + id: ID! 
+ b: BInput +} + +input BInput { + id: ID! +} diff --git a/apollo-federation/src/merge/testdata/interface_implementing_interface/one.graphql b/apollo-federation/src/merge/testdata/interface_implementing_interface/one.graphql new file mode 100644 index 0000000000..58fe7ca8fa --- /dev/null +++ b/apollo-federation/src/merge/testdata/interface_implementing_interface/one.graphql @@ -0,0 +1,114 @@ +schema { + query: Query +} + +extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/federation/v2.9") + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +directive @federation__key( + fields: federation__FieldSet! + resolvable: Boolean = true +) repeatable on OBJECT | INTERFACE + +directive @federation__requires( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__provides( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__external(reason: String) on OBJECT | FIELD_DEFINITION + +directive @federation__tag( + name: String! +) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @federation__extends on OBJECT | INTERFACE + +directive @federation__shareable on OBJECT | FIELD_DEFINITION + +directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @federation__override( + from: String! + label: String +) on FIELD_DEFINITION + +directive @federation__composeDirective(name: String) repeatable on SCHEMA + +directive @federation__interfaceObject on OBJECT + +directive @federation__authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__requiresScopes( + scopes: [[federation__Scope!]!]! 
+) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__cost( + weight: Int! +) on ARGUMENT_DEFINITION | ENUM | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | OBJECT | SCALAR + +directive @federation__listSize( + assumedSize: Int + slicingArguments: [String!] + sizedFields: [String!] + requireOneSlicingArgument: Boolean = true +) on FIELD_DEFINITION + +scalar link__Import + +enum link__Purpose { + """ + \`SECURITY\` features provide metadata necessary to securely resolve fields. + """ + SECURITY + """ + \`EXECUTION\` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +scalar federation__FieldSet + +scalar federation__Scope + +scalar _Any + +type _Service { + sdl: String +} + +type Query { + _service: _Service! + i: I +} + +interface Node { + id: ID! +} + +interface I implements Node { + id: ID! + i: String +} + +type A implements I & Node { + id: ID! + i: String + a: String +} + +type B implements I & Node { + id: ID! + i: String + b: String +} diff --git a/apollo-federation/src/merge/testdata/interface_object/one.graphql b/apollo-federation/src/merge/testdata/interface_object/one.graphql new file mode 100644 index 0000000000..92816f382d --- /dev/null +++ b/apollo-federation/src/merge/testdata/interface_object/one.graphql @@ -0,0 +1,109 @@ +schema { + query: Query +} + +extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/federation/v2.9") + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +directive @federation__key( + fields: federation__FieldSet! + resolvable: Boolean = true +) repeatable on OBJECT | INTERFACE + +directive @federation__requires( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__provides( + fields: federation__FieldSet! 
+) on FIELD_DEFINITION + +directive @federation__external(reason: String) on OBJECT | FIELD_DEFINITION + +directive @federation__tag( + name: String! +) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @federation__extends on OBJECT | INTERFACE + +directive @federation__shareable on OBJECT | FIELD_DEFINITION + +directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @federation__override( + from: String! + label: String +) on FIELD_DEFINITION + +directive @federation__composeDirective(name: String) repeatable on SCHEMA + +directive @federation__interfaceObject on OBJECT + +directive @federation__authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__requiresScopes( + scopes: [[federation__Scope!]!]! +) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__cost( + weight: Int! +) on ARGUMENT_DEFINITION | ENUM | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | OBJECT | SCALAR + +directive @federation__listSize( + assumedSize: Int + slicingArguments: [String!] + sizedFields: [String!] + requireOneSlicingArgument: Boolean = true +) on FIELD_DEFINITION + +scalar link__Import + +enum link__Purpose { + """ + \`SECURITY\` features provide metadata necessary to securely resolve fields. + """ + SECURITY + """ + \`EXECUTION\` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +scalar federation__FieldSet + +scalar federation__Scope + +interface Itf @federation__key(fields: "id", resolvable: true) { + id: ID! +} + +type T1 implements Itf @federation__key(fields: "id", resolvable: true) { + id: ID! + a: String +} + +type T2 implements Itf @federation__key(fields: "id", resolvable: true) { + id: ID! 
+ b: String +} + +scalar _Any + +type _Service { + sdl: String +} + +union _Entity = T1 | T2 + +type Query { + _entities(representations: [_Any!]!): [_Entity]! + _service: _Service! +} diff --git a/apollo-federation/src/merge/testdata/interface_object/three.graphql b/apollo-federation/src/merge/testdata/interface_object/three.graphql new file mode 100644 index 0000000000..f277d11df9 --- /dev/null +++ b/apollo-federation/src/merge/testdata/interface_object/three.graphql @@ -0,0 +1,98 @@ +schema { + query: Query +} + +extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/federation/v2.9") + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +directive @federation__key( + fields: federation__FieldSet! + resolvable: Boolean = true +) repeatable on OBJECT | INTERFACE + +directive @federation__requires( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__provides( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__external(reason: String) on OBJECT | FIELD_DEFINITION + +directive @federation__tag( + name: String! +) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @federation__extends on OBJECT | INTERFACE + +directive @federation__shareable on OBJECT | FIELD_DEFINITION + +directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @federation__override( + from: String! 
+ label: String +) on FIELD_DEFINITION + +directive @federation__composeDirective(name: String) repeatable on SCHEMA + +directive @federation__interfaceObject on OBJECT + +directive @federation__authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__requiresScopes( + scopes: [[federation__Scope!]!]! +) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__cost( + weight: Int! +) on ARGUMENT_DEFINITION | ENUM | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | OBJECT | SCALAR + +directive @federation__listSize( + assumedSize: Int + slicingArguments: [String!] + sizedFields: [String!] + requireOneSlicingArgument: Boolean = true +) on FIELD_DEFINITION + +scalar link__Import + +enum link__Purpose { + """ + \`SECURITY\` features provide metadata necessary to securely resolve fields. + """ + SECURITY + """ + \`EXECUTION\` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +scalar federation__FieldSet + +scalar federation__Scope + +type Itf @federation__interfaceObject @federation__key(fields: "id") { + c: Int! + d: Int! + id: ID! +} + +# This doesn't really make sense but it was a bug +type Itf2 @federation__interfaceObject @federation__key(fields: "id") { + id: ID! +} + +type Query { + itf(id: ID!): Itf + itf2(id: ID!): Itf2 +} diff --git a/apollo-federation/src/merge/testdata/interface_object/two.graphql b/apollo-federation/src/merge/testdata/interface_object/two.graphql new file mode 100644 index 0000000000..d40557b508 --- /dev/null +++ b/apollo-federation/src/merge/testdata/interface_object/two.graphql @@ -0,0 +1,93 @@ +schema { + query: Query +} + +extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/federation/v2.9") + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +directive @federation__key( + fields: federation__FieldSet! 
+ resolvable: Boolean = true +) repeatable on OBJECT | INTERFACE + +directive @federation__requires( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__provides( + fields: federation__FieldSet! +) on FIELD_DEFINITION + +directive @federation__external(reason: String) on OBJECT | FIELD_DEFINITION + +directive @federation__tag( + name: String! +) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @federation__extends on OBJECT | INTERFACE + +directive @federation__shareable on OBJECT | FIELD_DEFINITION + +directive @federation__inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @federation__override( + from: String! + label: String +) on FIELD_DEFINITION + +directive @federation__composeDirective(name: String) repeatable on SCHEMA + +directive @federation__interfaceObject on OBJECT + +directive @federation__authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__requiresScopes( + scopes: [[federation__Scope!]!]! +) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @federation__cost( + weight: Int! +) on ARGUMENT_DEFINITION | ENUM | FIELD_DEFINITION | INPUT_FIELD_DEFINITION | OBJECT | SCALAR + +directive @federation__listSize( + assumedSize: Int + slicingArguments: [String!] + sizedFields: [String!] + requireOneSlicingArgument: Boolean = true +) on FIELD_DEFINITION + +scalar link__Import + +enum link__Purpose { + """ + \`SECURITY\` features provide metadata necessary to securely resolve fields. + """ + SECURITY + """ + \`EXECUTION\` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +scalar federation__FieldSet + +scalar federation__Scope + +type Itf + @federation__interfaceObject + @federation__key(fields: "id", resolvable: true) { + c: Int! + id: ID! +} + +type Query { + itfs: [Itf] +} diff --git a/apollo-federation/src/merge/tests.rs b/apollo-federation/src/merge/tests.rs new file mode 100644 index 0000000000..e887906140 --- /dev/null +++ b/apollo-federation/src/merge/tests.rs @@ -0,0 +1,123 @@ +use apollo_compiler::Schema; +use insta::assert_snapshot; + +use crate::merge::merge_federation_subgraphs; +use crate::schema::ValidFederationSchema; +use crate::ValidFederationSubgraph; +use crate::ValidFederationSubgraphs; + +macro_rules! subgraphs { + ($($name:expr => $file:expr),* $(,)?) => {{ + let mut subgraphs = ValidFederationSubgraphs::new(); + + $( + subgraphs.add(ValidFederationSubgraph { + name: $name.to_string(), + url: "".to_string(), + schema: ValidFederationSchema::new( + Schema::parse_and_validate(include_str!($file), $file).unwrap(), + ) + .unwrap(), + }).unwrap(); + )* + + subgraphs + }}; +} + +#[test] +fn test_steel_thread() { + let subgraphs = subgraphs! { + "connector_Query_users_0" => "../sources/connect/expand/merge/connector_Query_users_0.graphql", + "connector_Query_user_0" => "../sources/connect/expand/merge/connector_Query_user_0.graphql", + "connector_User_d_1" => "../sources/connect/expand/merge/connector_User_d_1.graphql", + "graphql" => "../sources/connect/expand/merge/graphql.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} + +#[test] +fn test_basic() { + let subgraphs = subgraphs! 
{ + "basic_1" => "../sources/connect/expand/merge/basic_1.graphql", + "basic_2" => "../sources/connect/expand/merge/basic_2.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} + +#[test] +fn test_inaccessible() { + let subgraphs = subgraphs! { + "inaccessible" => "../sources/connect/expand/merge/inaccessible.graphql", + "inaccessible_2" => "../sources/connect/expand/merge/inaccessible_2.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} + +#[test] +fn test_interface_object() { + let subgraphs = subgraphs! { + "interface_object_1" => "./testdata/interface_object/one.graphql", + "interface_object_2" => "./testdata/interface_object/two.graphql", + "interface_object_3" => "./testdata/interface_object/three.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} + +#[test] +fn test_input_types() { + let subgraphs = subgraphs! { + "one" => "./testdata/input_types/one.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} + +#[test] +fn test_interface_implementing_interface() { + let subgraphs = subgraphs! 
{ + "one" => "./testdata/interface_implementing_interface/one.graphql", + }; + + let result = merge_federation_subgraphs(subgraphs).unwrap(); + + let schema = result.schema.into_inner(); + let validation = schema.clone().validate(); + assert!(validation.is_ok(), "{:?}", validation); + + assert_snapshot!(schema.serialize()); +} diff --git a/apollo-federation/src/schema/position.rs b/apollo-federation/src/schema/position.rs index 916f10aa4f..478ac451e2 100644 --- a/apollo-federation/src/schema/position.rs +++ b/apollo-federation/src/schema/position.rs @@ -624,6 +624,13 @@ impl Debug for ObjectOrInterfaceFieldDefinitionPosition { impl ObjectOrInterfaceFieldDefinitionPosition { const EXPECTED: &'static str = "an object/interface field"; + pub(crate) fn type_name(&self) -> &Name { + match self { + ObjectOrInterfaceFieldDefinitionPosition::Object(field) => &field.type_name, + ObjectOrInterfaceFieldDefinitionPosition::Interface(field) => &field.type_name, + } + } + pub(crate) fn field_name(&self) -> &Name { match self { ObjectOrInterfaceFieldDefinitionPosition::Object(field) => &field.field_name, @@ -2157,6 +2164,30 @@ impl ObjectFieldArgumentDefinitionPosition { Ok(()) } + pub(crate) fn insert_directive( + &self, + schema: &mut FederationSchema, + directive: Node, + ) -> Result<(), FederationError> { + let argument = self.make_mut(&mut schema.schema)?; + if argument + .directives + .iter() + .any(|other_directive| other_directive.ptr_eq(&directive)) + { + return Err(SingleFederationError::Internal { + message: format!( + "Directive application \"@{}\" already exists on object field argument \"{}\"", + directive.name, self, + ), + } + .into()); + } + let name = directive.name.clone(); + argument.make_mut().directives.push(directive); + self.insert_directive_name_references(&mut schema.referencers, &name) + } + /// Remove a directive application from this position by name. 
pub(crate) fn remove_directive_name(&self, schema: &mut FederationSchema, name: &str) { let Some(argument) = self.try_make_mut(&mut schema.schema) else { @@ -2306,6 +2337,13 @@ impl Debug for ObjectFieldArgumentDefinitionPosition { } } +#[derive(Debug, Clone, Hash, PartialEq, Eq)] +pub(crate) struct ObjectOrInterfaceFieldDirectivePosition { + pub(crate) field: ObjectOrInterfaceFieldDefinitionPosition, + pub(crate) directive_name: Name, + pub(crate) directive_index: usize, +} + #[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize)] pub(crate) struct InterfaceTypeDefinitionPosition { pub(crate) type_name: Name, @@ -3123,6 +3161,32 @@ impl InterfaceFieldArgumentDefinitionPosition { Ok(()) } + pub(crate) fn insert_directive( + &self, + schema: &mut FederationSchema, + directive: Node, + ) -> Result<(), FederationError> { + let argument = self.make_mut(&mut schema.schema)?; + if argument + .directives + .iter() + .any(|other_directive| other_directive.ptr_eq(&directive)) + { + return Err( + SingleFederationError::Internal { + message: format!( + "Directive application \"@{}\" already exists on interface field argument \"{}\"", + directive.name, + self, + ) + }.into() + ); + } + let name = directive.name.clone(); + argument.make_mut().directives.push(directive); + self.insert_directive_name_references(&mut schema.referencers, &name) + } + /// Remove a directive application from this position by name. 
pub(crate) fn remove_directive_name(&self, schema: &mut FederationSchema, name: &str) { let Some(argument) = self.try_make_mut(&mut schema.schema) else { @@ -4022,6 +4086,30 @@ impl EnumValueDefinitionPosition { Ok(()) } + pub(crate) fn insert_directive( + &self, + schema: &mut FederationSchema, + directive: Node, + ) -> Result<(), FederationError> { + let value = self.make_mut(&mut schema.schema)?; + if value + .directives + .iter() + .any(|other_directive| other_directive.ptr_eq(&directive)) + { + return Err(SingleFederationError::Internal { + message: format!( + "Directive application \"@{}\" already exists on enum value \"{}\"", + directive.name, self, + ), + } + .into()); + } + let name = directive.name.clone(); + value.make_mut().directives.push(directive); + self.insert_directive_name_references(&mut schema.referencers, &name) + } + /// Remove a directive application from this position by name. pub(crate) fn remove_directive_name(&self, schema: &mut FederationSchema, name: &str) { let Some(value) = self.try_make_mut(&mut schema.schema) else { @@ -4316,7 +4404,6 @@ impl InputObjectTypeDefinitionPosition { .retain(|other_directive| other_directive.name != name); } - /// Remove a directive application. 
fn insert_references( &self, type_: &Node, @@ -4504,6 +4591,30 @@ impl InputObjectFieldDefinitionPosition { Ok(()) } + pub(crate) fn insert_directive( + &self, + schema: &mut FederationSchema, + directive: Node, + ) -> Result<(), FederationError> { + let field = self.make_mut(&mut schema.schema)?; + if field + .directives + .iter() + .any(|other_directive| other_directive.ptr_eq(&directive)) + { + return Err(SingleFederationError::Internal { + message: format!( + "Directive application \"@{}\" already exists on input object field \"{}\"", + directive.name, self, + ), + } + .into()); + } + let name = directive.name.clone(); + field.make_mut().directives.push(directive); + self.insert_directive_name_references(&mut schema.referencers, &name) + } + /// Remove a directive application from this position by name. pub(crate) fn remove_directive_name(&self, schema: &mut FederationSchema, name: &str) { let Some(field) = self.try_make_mut(&mut schema.schema) else { @@ -4903,6 +5014,30 @@ impl DirectiveArgumentDefinitionPosition { Ok(()) } + pub(crate) fn insert_directive( + &self, + schema: &mut FederationSchema, + directive: Node, + ) -> Result<(), FederationError> { + let argument = self.make_mut(&mut schema.schema)?; + if argument + .directives + .iter() + .any(|other_directive| other_directive.ptr_eq(&directive)) + { + return Err(SingleFederationError::Internal { + message: format!( + "Directive application \"@{}\" already exists on directive argument \"{}\"", + directive.name, self, + ), + } + .into()); + } + let name = directive.name.clone(); + argument.make_mut().directives.push(directive); + self.insert_directive_name_references(&mut schema.referencers, &name) + } + /// Remove a directive application from this position by name. 
pub(crate) fn remove_directive_name(&self, schema: &mut FederationSchema, name: &str) { let Some(argument) = self.try_make_mut(&mut schema.schema) else { diff --git a/apollo-federation/src/sources/connect/expand/carryover.rs b/apollo-federation/src/sources/connect/expand/carryover.rs new file mode 100644 index 0000000000..206ec465b1 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/carryover.rs @@ -0,0 +1,505 @@ +use apollo_compiler::ast::Argument; +use apollo_compiler::ast::Directive; +use apollo_compiler::ast::Value; +use apollo_compiler::name; +use apollo_compiler::Name; +use apollo_compiler::Node; + +use crate::error::FederationError; +use crate::link::inaccessible_spec_definition::INACCESSIBLE_DIRECTIVE_NAME_IN_SPEC; +use crate::link::spec::Identity; +use crate::link::spec::APOLLO_SPEC_DOMAIN; +use crate::link::Link; +use crate::link::DEFAULT_LINK_NAME; +use crate::schema::position::DirectiveArgumentDefinitionPosition; +use crate::schema::position::DirectiveDefinitionPosition; +use crate::schema::position::EnumTypeDefinitionPosition; +use crate::schema::position::EnumValueDefinitionPosition; +use crate::schema::position::InputObjectFieldDefinitionPosition; +use crate::schema::position::InputObjectTypeDefinitionPosition; +use crate::schema::position::InterfaceFieldArgumentDefinitionPosition; +use crate::schema::position::InterfaceFieldDefinitionPosition; +use crate::schema::position::InterfaceTypeDefinitionPosition; +use crate::schema::position::ObjectFieldArgumentDefinitionPosition; +use crate::schema::position::ObjectFieldDefinitionPosition; +use crate::schema::position::ObjectTypeDefinitionPosition; +use crate::schema::position::ScalarTypeDefinitionPosition; +use crate::schema::position::SchemaDefinitionPosition; +use crate::schema::position::UnionTypeDefinitionPosition; +use crate::schema::referencer::DirectiveReferencers; +use crate::schema::FederationSchema; +use crate::sources::connect::ConnectSpec; + +const TAG_DIRECTIVE_NAME_IN_SPEC: 
Name = name!("tag"); +const AUTHENTICATED_DIRECTIVE_NAME_IN_SPEC: Name = name!("authenticated"); +const REQUIRES_SCOPES_DIRECTIVE_NAME_IN_SPEC: Name = name!("requiresScopes"); +const POLICY_DIRECTIVE_NAME_IN_SPEC: Name = name!("policy"); + +pub(super) fn carryover_directives( + from: &FederationSchema, + to: &mut FederationSchema, + specs: impl Iterator, +) -> Result<(), FederationError> { + let Some(metadata) = from.metadata() else { + return Ok(()); + }; + + // @join__directive(graph: [], name: "link", args: { url: "https://specs.apollo.dev/connect/v0.1" }) + // this must exist for license key enforcement + for spec in specs { + SchemaDefinitionPosition.insert_directive(to, spec.join_directive_application().into())?; + } + + // @inaccessible + + if let Some(link) = metadata.for_identity(&Identity::inaccessible_identity()) { + let directive_name = link.directive_name_in_schema(&INACCESSIBLE_DIRECTIVE_NAME_IN_SPEC); + from.referencers() + .get_directive(&directive_name) + .and_then(|referencers| { + // because the merge code handles inaccessible, we have to check if the + // @link and directive definition are already present in the schema + if referencers.len() > 0 + && to + .metadata() + .and_then(|m| m.by_identity.get(&Identity::inaccessible_identity())) + .is_none() + { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + copy_directive_definition(from, to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + } + + // @tag + + if let Some(link) = metadata.for_identity(&Identity { + domain: APOLLO_SPEC_DOMAIN.to_string(), + name: TAG_DIRECTIVE_NAME_IN_SPEC, + }) { + let directive_name = link.directive_name_in_schema(&TAG_DIRECTIVE_NAME_IN_SPEC); + from.referencers() + .get_directive(&directive_name) + .and_then(|referencers| { + if referencers.len() > 0 { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + copy_directive_definition(from, 
to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + } + + // @authenticated + + if let Some(link) = metadata.for_identity(&Identity { + domain: APOLLO_SPEC_DOMAIN.to_string(), + name: AUTHENTICATED_DIRECTIVE_NAME_IN_SPEC, + }) { + let directive_name = link.directive_name_in_schema(&AUTHENTICATED_DIRECTIVE_NAME_IN_SPEC); + from.referencers() + .get_directive(&directive_name) + .and_then(|referencers| { + if referencers.len() > 0 { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + copy_directive_definition(from, to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + } + + // @requiresScopes + + if let Some(link) = metadata.for_identity(&Identity { + domain: APOLLO_SPEC_DOMAIN.to_string(), + name: REQUIRES_SCOPES_DIRECTIVE_NAME_IN_SPEC, + }) { + let directive_name = link.directive_name_in_schema(&REQUIRES_SCOPES_DIRECTIVE_NAME_IN_SPEC); + from.referencers() + .get_directive(&directive_name) + .and_then(|referencers| { + if referencers.len() > 0 { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + + let scalar_type_pos = ScalarTypeDefinitionPosition { + type_name: link.type_name_in_schema(&name!(Scope)), + }; + + // The scalar might already exist if a subgraph defined it + if scalar_type_pos.get(to.schema()).is_err() { + scalar_type_pos + .get(from.schema()) + .map_err(From::from) + .and_then(|def| { + scalar_type_pos.pre_insert(to)?; + scalar_type_pos.insert(to, def.clone()) + })?; + } + + copy_directive_definition(from, to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + } + + // @policy + + if let Some(link) = metadata.for_identity(&Identity { + domain: APOLLO_SPEC_DOMAIN.to_string(), + name: POLICY_DIRECTIVE_NAME_IN_SPEC, + }) { + let directive_name = link.directive_name_in_schema(&POLICY_DIRECTIVE_NAME_IN_SPEC); + from.referencers() + 
.get_directive(&directive_name) + .and_then(|referencers| { + if referencers.len() > 0 { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + + let scalar_type_pos = ScalarTypeDefinitionPosition { + type_name: link.type_name_in_schema(&name!(Policy)), + }; + + // The scalar might already exist if a subgraph defined it + if scalar_type_pos.get(to.schema()).is_err() { + scalar_type_pos + .get(from.schema()) + .map_err(From::from) + .and_then(|def| { + scalar_type_pos.pre_insert(to)?; + scalar_type_pos.insert(to, def.clone()) + })?; + } + + copy_directive_definition(from, to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + } + + // compose directive + + metadata + .directives_by_imported_name + .iter() + .filter(|(_name, (link, _import))| !is_known_link(link)) + .try_for_each(|(name, (link, import))| { + // This is a strange thing — someone is importing @defer, but it's not a type system directive so we don't need to carry it over + if name == "defer" { + return Ok(()); + } + let directive_name = link.directive_name_in_schema(&import.element); + from.referencers() + .get_directive(&directive_name) + .and_then(|referencers| { + if referencers.len() > 0 { + if !SchemaDefinitionPosition + .get(to.schema()) + .directives + .iter() + .any(|d| { + d.name == DEFAULT_LINK_NAME + && d.specified_argument_by_name("url") + .and_then(|url| url.as_str()) + .map(|url| link.url.to_string() == *url) + .unwrap_or_default() + }) + { + SchemaDefinitionPosition + .insert_directive(to, link.to_directive_application().into())?; + } + + copy_directive_definition(from, to, directive_name.clone())?; + } + referencers.copy_directives(from, to, &directive_name) + })?; + Ok::<_, FederationError>(()) + })?; + + Ok(()) +} + +fn is_known_link(link: &Link) -> bool { + link.url.identity.domain == APOLLO_SPEC_DOMAIN + && [ + name!(link), + name!(join), + name!(tag), + name!(inaccessible), + name!(authenticated), + 
name!(requiresScopes), + name!(policy), + ] + .contains(&link.url.identity.name) +} + +fn copy_directive_definition( + from: &FederationSchema, + to: &mut FederationSchema, + directive_name: Name, +) -> Result<(), FederationError> { + let def_pos = DirectiveDefinitionPosition { directive_name }; + + // If it exists, remove it so we can add the directive as defined in the + // supergraph. In rare cases where a directive can be applied to both + // executable and type system locations, extract_subgraphs_from_supergraph + // will include the definition with only the executable locations, making + // other applications invalid. + if def_pos.get(to.schema()).is_ok() { + def_pos.remove(to)?; + } + + def_pos + .get(from.schema()) + .map_err(From::from) + .and_then(|def| { + def_pos.pre_insert(to)?; + def_pos.insert(to, def.clone()) + }) +} + +impl Link { + fn to_directive_application(&self) -> Directive { + let mut arguments: Vec> = vec![Argument { + name: name!(url), + value: self.url.to_string().into(), + } + .into()]; + + // purpose: link__Purpose + if let Some(purpose) = &self.purpose { + arguments.push( + Argument { + name: name!(for), + value: Value::Enum(purpose.into()).into(), + } + .into(), + ); + } + + // as: String + if let Some(alias) = &self.spec_alias { + arguments.push( + Argument { + name: name!(as), + value: Value::String(alias.to_string()).into(), + } + .into(), + ); + } + + // import: [link__Import!] 
+ if !self.imports.is_empty() { + arguments.push( + Argument { + name: name!(import), + value: Value::List( + self.imports + .iter() + .map(|i| { + let name = if i.is_directive { + format!("@{}", i.element) + } else { + i.element.to_string() + }; + + if let Some(alias) = &i.alias { + let alias = if i.is_directive { + format!("@{}", alias) + } else { + alias.to_string() + }; + + Value::Object(vec![ + (name!(name), Value::String(name).into()), + (name!(as), Value::String(alias).into()), + ]) + } else { + Value::String(name) + } + .into() + }) + .collect::>(), + ) + .into(), + } + .into(), + ); + } + + Directive { + name: name!(link), + arguments, + } + } +} + +trait CopyDirective { + fn copy_directive( + &self, + from: &FederationSchema, + to: &mut FederationSchema, + directive_name: &Name, + ) -> Result<(), FederationError>; +} + +impl CopyDirective for SchemaDefinitionPosition { + fn copy_directive( + &self, + from: &FederationSchema, + to: &mut FederationSchema, + directive_name: &Name, + ) -> Result<(), FederationError> { + self.get(from.schema()) + .directives + .iter() + .filter(|d| &d.name == directive_name) + .try_for_each(|directive| self.insert_directive(to, directive.clone())) + } +} + +macro_rules! impl_copy_directive { + ($( $Ty: ty )+) => { + $( + impl CopyDirective for $Ty { + fn copy_directive( + &self, + from: &FederationSchema, + to: &mut FederationSchema, + directive_name: &Name, + ) -> Result<(), FederationError> { + self.get(from.schema()) + .map(|def| { + def.directives + .iter() + .filter(|d| &d.name == directive_name) + .try_for_each(|directive| self.insert_directive(to, directive.clone())) + }) + .unwrap_or(Ok(())) + } + } + )+ + }; +} + +impl_copy_directive! 
{ + ScalarTypeDefinitionPosition + ObjectTypeDefinitionPosition + ObjectFieldDefinitionPosition + ObjectFieldArgumentDefinitionPosition + InterfaceTypeDefinitionPosition + InterfaceFieldDefinitionPosition + InterfaceFieldArgumentDefinitionPosition + UnionTypeDefinitionPosition + EnumTypeDefinitionPosition + EnumValueDefinitionPosition + InputObjectTypeDefinitionPosition + InputObjectFieldDefinitionPosition + DirectiveArgumentDefinitionPosition +} + +impl DirectiveReferencers { + fn len(&self) -> usize { + self.schema.as_ref().map(|_| 1).unwrap_or_default() + + self.scalar_types.len() + + self.object_types.len() + + self.object_fields.len() + + self.object_field_arguments.len() + + self.interface_types.len() + + self.interface_fields.len() + + self.interface_field_arguments.len() + + self.union_types.len() + + self.enum_types.len() + + self.enum_values.len() + + self.input_object_types.len() + + self.input_object_fields.len() + + self.directive_arguments.len() + } + + fn copy_directives( + &self, + from: &FederationSchema, + to: &mut FederationSchema, + directive_name: &Name, + ) -> Result<(), FederationError> { + if let Some(position) = &self.schema { + position.copy_directive(from, to, directive_name)? 
+ } + self.scalar_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.object_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.object_fields + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.object_field_arguments + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.interface_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.interface_fields + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.interface_field_arguments + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.union_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.enum_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.enum_values + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.input_object_types + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.input_object_fields + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + self.directive_arguments + .iter() + .try_for_each(|position| position.copy_directive(from, to, directive_name))?; + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::Schema; + use insta::assert_snapshot; + + use super::carryover_directives; + use crate::merge::merge_federation_subgraphs; + use crate::schema::FederationSchema; + use crate::sources::connect::ConnectSpec; + use crate::supergraph::extract_subgraphs_from_supergraph; + + #[test] + fn test_carryover() { + let sdl = include_str!("./tests/schemas/ignore/directives.graphql"); + let schema = Schema::parse(sdl, "directives.graphql").expect("parse 
failed"); + let supergraph_schema = FederationSchema::new(schema).expect("federation schema failed"); + let subgraphs = extract_subgraphs_from_supergraph(&supergraph_schema, None) + .expect("extract subgraphs failed"); + let merged = merge_federation_subgraphs(subgraphs).expect("merge failed"); + let schema = merged.schema.into_inner(); + let mut schema = FederationSchema::new(schema).expect("federation schema failed"); + + carryover_directives( + &supergraph_schema, + &mut schema, + [ConnectSpec::V0_1].into_iter(), + ) + .expect("carryover failed"); + assert_snapshot!(schema.schema().serialize().to_string()); + } +} diff --git a/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql b/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql index e9a3099331..c3357ffb75 100644 --- a/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql +++ b/apollo-federation/src/sources/connect/expand/merge/basic_1.graphql @@ -39,3 +39,11 @@ type Y { input YInput { z: ID } + +type Mutation { + m: M +} + +type M { + n: String +} diff --git a/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql b/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql index e9a3099331..4dc7cdf929 100644 --- a/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql +++ b/apollo-federation/src/sources/connect/expand/merge/basic_2.graphql @@ -6,15 +6,18 @@ type Query { interface I { id: ID! + f(x: ID, y: YInput): T } type A implements I { id: ID! + f(x: ID, y: YInput): T a: S } type B implements I { id: ID! 
+ f(x: ID, y: YInput): T b: E } @@ -39,3 +42,11 @@ type Y { input YInput { z: ID } + +type Mutation { + m2(x: ID, y: YInput): M +} + +type M { + n: String +} diff --git a/apollo-federation/src/sources/connect/expand/mod.rs b/apollo-federation/src/sources/connect/expand/mod.rs new file mode 100644 index 0000000000..f9a2834817 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/mod.rs @@ -0,0 +1,785 @@ +use std::collections::HashSet; +use std::sync::Arc; + +use apollo_compiler::validation::Valid; +use apollo_compiler::Schema; +use carryover::carryover_directives; +use indexmap::IndexMap; +use itertools::Itertools; + +use crate::error::FederationError; +use crate::link::Link; +use crate::merge::merge_subgraphs; +use crate::schema::FederationSchema; +use crate::sources::connect::ConnectSpec; +use crate::sources::connect::Connector; +use crate::subgraph::Subgraph; +use crate::subgraph::ValidSubgraph; +use crate::ApiSchemaOptions; +use crate::Supergraph; +use crate::ValidFederationSubgraph; + +mod carryover; +pub(crate) mod visitors; +use visitors::filter_directives; + +pub struct Connectors { + pub by_service_name: Arc, Connector>>, + pub labels_by_service_name: Arc, String>>, +} + +/// The result of a supergraph expansion of connect-aware subgraphs +pub enum ExpansionResult { + /// The supergraph had some subgraphs that were expanded + Expanded { + raw_sdl: String, + api_schema: Box>, + connectors: Connectors, + }, + + /// The supergraph contained no connect directives and was unchanged. + Unchanged, +} + +/// Expand a schema with connector directives into unique subgraphs per directive +/// +/// Until we have a source-aware query planner, work with connectors will need to interface +/// with standard query planning concepts while still enforcing connector-specific rules. To do so, +/// each connector is separated into its own unique subgraph with relevant GraphQL directives to enforce +/// field dependencies and response structures. 
This allows for satisfiability and validation to piggy-back +/// off of existing functionality in a reproducable way. +pub fn expand_connectors( + supergraph_str: &str, + api_schema_options: &ApiSchemaOptions, +) -> Result { + // TODO: Don't rely on finding the URL manually to short out + let connect_url = ConnectSpec::identity(); + let connect_url = format!("{}/{}/v", connect_url.domain, connect_url.name); + if !supergraph_str.contains(&connect_url) { + return Ok(ExpansionResult::Unchanged); + } + + let supergraph = Supergraph::new(supergraph_str)?; + let api_schema = supergraph.to_api_schema(api_schema_options.clone())?; + + let (connect_subgraphs, graphql_subgraphs): (Vec<_>, Vec<_>) = supergraph + .extract_subgraphs()? + .into_iter() + .partition_map( + |(_, sub)| match ConnectSpec::get_from_schema(sub.schema.schema()) { + Some((spec, link)) if contains_connectors(&link, &sub) => { + either::Either::Left((spec, link, sub)) + } + _ => either::Either::Right(ValidSubgraph::from(sub)), + }, + ); + + // Expand just the connector subgraphs + let mut expanded_subgraphs = Vec::new(); + let mut spec_versions = HashSet::new(); + + for (spec, link, sub) in connect_subgraphs { + expanded_subgraphs.extend(split_subgraph(&link, sub, spec)?); + spec_versions.insert(spec); + } + + // Merge the subgraphs into one supergraph + let all_subgraphs = graphql_subgraphs + .iter() + .chain(expanded_subgraphs.iter().map(|(_, sub)| sub)) + .collect(); + let new_supergraph = merge_subgraphs(all_subgraphs).map_err(|e| { + FederationError::internal(format!("could not merge expanded subgraphs: {e:?}")) + })?; + + let mut new_supergraph = FederationSchema::new(new_supergraph.schema.into_inner())?; + carryover_directives( + &supergraph.schema, + &mut new_supergraph, + spec_versions.into_iter(), + ) + .map_err(|e| FederationError::internal(format!("could not carry over directives: {e:?}")))?; + + let connectors_by_service_name: IndexMap, Connector> = expanded_subgraphs + .into_iter() + 
.map(|(connector, sub)| (sub.name.into(), connector)) + .collect(); + + let labels_by_service_name = connectors_by_service_name + .iter() + .map(|(service_name, connector)| (service_name.clone(), connector.id.label.clone())) + .collect(); + + Ok(ExpansionResult::Expanded { + raw_sdl: new_supergraph.schema().serialize().to_string(), + api_schema: Box::new(api_schema.schema().clone()), + connectors: Connectors { + by_service_name: Arc::new(connectors_by_service_name), + labels_by_service_name: Arc::new(labels_by_service_name), + }, + }) +} + +fn contains_connectors(link: &Link, subgraph: &ValidFederationSubgraph) -> bool { + let connect_name = ConnectSpec::connect_directive_name(link); + let source_name = ConnectSpec::source_directive_name(link); + + subgraph + .schema + .get_directive_definitions() + .any(|directive| { + directive.directive_name == connect_name || directive.directive_name == source_name + }) +} + +/// Split up a subgraph so that each connector directive becomes its own subgraph. +/// +/// Subgraphs passed to this function should contain connector directives. +fn split_subgraph( + link: &Link, + subgraph: ValidFederationSubgraph, + spec: ConnectSpec, +) -> Result, FederationError> { + let connector_map = Connector::from_schema(subgraph.schema.schema(), &subgraph.name, spec)?; + + let expander = helpers::Expander::new(link, &subgraph); + connector_map + .into_iter() + .map(|(id, connector)| { + // Build a subgraph using only the necessary fields from the directive + let schema = expander.expand(&connector)?; + let subgraph = Subgraph::new( + id.synthetic_name().as_str(), + &subgraph.url, + &schema.schema().serialize().to_string(), + )?; + + // We only validate during debug builds since we should realistically only generate valid schemas + // for these subgraphs. 
+ #[cfg(debug_assertions)] + let schema = subgraph.schema.validate()?; + #[cfg(not(debug_assertions))] + let schema = Valid::assume_valid(subgraph.schema); + + Ok(( + connector, + ValidSubgraph { + name: subgraph.name, + url: subgraph.url, + schema, + }, + )) + }) + .try_collect() +} + +mod helpers { + use apollo_compiler::ast; + use apollo_compiler::ast::Argument; + use apollo_compiler::ast::Directive; + use apollo_compiler::ast::FieldDefinition; + use apollo_compiler::ast::InputValueDefinition; + use apollo_compiler::ast::Value; + use apollo_compiler::name; + use apollo_compiler::schema::Component; + use apollo_compiler::schema::ComponentName; + use apollo_compiler::schema::ComponentOrigin; + use apollo_compiler::schema::DirectiveList; + use apollo_compiler::schema::EnumType; + use apollo_compiler::schema::ObjectType; + use apollo_compiler::schema::ScalarType; + use apollo_compiler::Name; + use apollo_compiler::Node; + use indexmap::IndexMap; + use indexmap::IndexSet; + + use super::filter_directives; + use super::visitors::try_insert; + use super::visitors::try_pre_insert; + use super::visitors::GroupVisitor; + use super::visitors::SchemaVisitor; + use crate::error::FederationError; + use crate::internal_error; + use crate::link::spec::Identity; + use crate::link::Link; + use crate::schema::position::ObjectFieldDefinitionPosition; + use crate::schema::position::ObjectOrInterfaceTypeDefinitionPosition; + use crate::schema::position::ObjectTypeDefinitionPosition; + use crate::schema::position::SchemaRootDefinitionKind; + use crate::schema::position::SchemaRootDefinitionPosition; + use crate::schema::position::TypeDefinitionPosition; + use crate::schema::FederationSchema; + use crate::schema::ValidFederationSchema; + use crate::sources::connect::ConnectSpec; + use crate::sources::connect::Connector; + use crate::sources::connect::EntityResolver; + use crate::sources::connect::JSONSelection; + use crate::subgraph::spec::EXTERNAL_DIRECTIVE_NAME; + use 
crate::subgraph::spec::INTF_OBJECT_DIRECTIVE_NAME; + use crate::subgraph::spec::KEY_DIRECTIVE_NAME; + use crate::subgraph::spec::REQUIRES_DIRECTIVE_NAME; + use crate::supergraph::new_empty_fed_2_subgraph_schema; + use crate::ValidFederationSubgraph; + + /// A helper struct for expanding a subgraph into one per connect directive. + pub(super) struct Expander<'a> { + /// The name of the connect directive, possibly aliased. + #[allow(unused)] + connect_name: Name, + + /// The name of the connect directive, possibly aliased. + #[allow(unused)] + source_name: Name, + + /// The name of the @key directive, as known in the subgraph + key_name: Name, + + /// The name of the @interfaceObject directive, as known in the subgraph + interface_object_name: Name, + + /// The original schema that contains connect directives + original_schema: &'a ValidFederationSchema, + + /// A list of directives to exclude when copying over types from the + /// original schema. + directive_deny_list: IndexSet, + } + + impl<'a> Expander<'a> { + pub(super) fn new(link: &Link, subgraph: &'a ValidFederationSubgraph) -> Expander<'a> { + let connect_name = ConnectSpec::connect_directive_name(link); + let source_name = ConnectSpec::source_directive_name(link); + + // When we go to expand all output types, we'll need to make sure that we don't carry over + // any connect-related directives. The following directives are also special because they + // influence planning and satisfiability: + // + // - @key: derived based on the fields selected + // - @external: the current approach will only add external fields to the list of keys + // if used in the transport. If not used at all, the field marked with this directive + // won't even be included in the expanded subgraph, but if it _is_ used then leaving + // this directive will result in planning failures. + // - @requires: the current approach will add required fields to the list of keys for + // implicit entities, so it can't stay. 
+ let key_name = subgraph + .schema + .metadata() + .and_then(|m| m.for_identity(&Identity::federation_identity())) + .map(|f| f.directive_name_in_schema(&KEY_DIRECTIVE_NAME)) + .unwrap_or(KEY_DIRECTIVE_NAME); + let interface_object_name = subgraph + .schema + .metadata() + .and_then(|m| m.for_identity(&Identity::federation_identity())) + .map(|f| f.directive_name_in_schema(&INTF_OBJECT_DIRECTIVE_NAME)) + .unwrap_or(INTF_OBJECT_DIRECTIVE_NAME); + let extra_excluded = [EXTERNAL_DIRECTIVE_NAME, REQUIRES_DIRECTIVE_NAME] + .into_iter() + .map(|d| { + subgraph + .schema + .metadata() + .and_then(|m| m.for_identity(&Identity::federation_identity())) + .map(|f| f.directive_name_in_schema(&d)) + .unwrap_or(d) + }); + let directive_deny_list = IndexSet::from_iter(extra_excluded.chain([ + key_name.clone(), + connect_name.clone(), + source_name.clone(), + ])); + + Self { + connect_name, + source_name, + key_name, + interface_object_name, + original_schema: &subgraph.schema, + directive_deny_list, + } + } + + /// Build an expanded subgraph for the supplied connector + pub(super) fn expand( + &self, + connector: &Connector, + ) -> Result { + let mut schema = new_empty_fed_2_subgraph_schema()?; + let query_alias = self + .original_schema + .schema() + .schema_definition + .query + .as_ref() + .map(|m| m.name.clone()) + .unwrap_or(name!("Query")); + let mutation_alias = self + .original_schema + .schema() + .schema_definition + .mutation + .as_ref() + .map(|m| m.name.clone()); + + let field = &connector.id.directive.field; + let field_def = field.get(self.original_schema.schema())?; + let field_type = self + .original_schema + .get_type(field_def.ty.inner_named_type().clone())?; + + // We'll need to make sure that we always process the inputs first, since they need to be present + // before any dependent types + self.process_inputs(&mut schema, &field_def.arguments)?; + + // Actually process the type annotated with the connector, making sure to walk nested types + match 
field_type { + TypeDefinitionPosition::Object(object) => { + SchemaVisitor::new( + self.original_schema, + &mut schema, + &self.directive_deny_list, + ) + .walk(( + object, + connector.selection.next_subselection().cloned().ok_or( + FederationError::internal("empty selections are not allowed"), + )?, + ))?; + } + + TypeDefinitionPosition::Scalar(_) | TypeDefinitionPosition::Enum(_) => { + self.insert_custom_leaf(&mut schema, &field_type)?; + } + + TypeDefinitionPosition::Interface(interface) => { + return Err(FederationError::internal(format!( + "connect directives not yet supported on interfaces: found on {}", + interface.type_name + ))) + } + TypeDefinitionPosition::Union(union) => { + return Err(FederationError::internal(format!( + "connect directives not yet supported on union: found on {}", + union.type_name + ))) + } + TypeDefinitionPosition::InputObject(input) => { + return Err(FederationError::internal(format!( + "connect directives not yet supported on inputs: found on {}", + input.type_name + ))) + } + }; + + // Add the root type for this connector, optionally inserting a dummy query root + // if the connector is not defined within a field on a Query (since a subgraph is invalid + // without at least a root-level Query) + let ObjectOrInterfaceTypeDefinitionPosition::Object(parent_object) = field.parent() + else { + return Err(FederationError::internal( + "connect directives on interfaces is not yet supported", + )); + }; + + self.insert_query_for_field(&mut schema, &query_alias, &parent_object, field_def)?; + + let query_root = SchemaRootDefinitionPosition { + root_kind: SchemaRootDefinitionKind::Query, + }; + query_root.insert( + &mut schema, + ComponentName { + origin: ComponentOrigin::Definition, + name: query_alias, + }, + )?; + + if let Some(mutation_alias) = mutation_alias { + // only add the mutation root definition if we've added the + // type to this schema + if schema.get_type(mutation_alias.clone()).is_ok() { + let mutation_root = 
SchemaRootDefinitionPosition { + root_kind: SchemaRootDefinitionKind::Mutation, + }; + mutation_root.insert( + &mut schema, + ComponentName { + origin: ComponentOrigin::Definition, + name: mutation_alias, + }, + )?; + } + } + + // Process any outputs needed by the connector + self.process_outputs( + &mut schema, + connector, + parent_object.type_name.clone(), + field_def.ty.inner_named_type().clone(), + )?; + + Ok(schema) + } + + /// Process all input types + /// + /// Inputs can include leaf types as well as custom inputs. + fn process_inputs( + &self, + to_schema: &mut FederationSchema, + arguments: &[Node], + ) -> Result<(), FederationError> { + // All inputs to a connector's field need to be carried over in order to always generate + // valid subgraphs + for arg in arguments { + let arg_type_name = arg.ty.inner_named_type(); + let arg_type = self.original_schema.get_type(arg_type_name.clone())?; + let arg_extended_type = arg_type.get(self.original_schema.schema())?; + + // If the input type isn't built in, then we need to carry it over, making sure to only walk + // if we have a complex input since leaf types can just be copied over. + if !arg_extended_type.is_built_in() { + match arg_type { + TypeDefinitionPosition::InputObject(input) => SchemaVisitor::new( + self.original_schema, + to_schema, + &self.directive_deny_list, + ) + .walk(input)?, + + other => self.insert_custom_leaf(to_schema, &other)?, + }; + } + } + + Ok(()) + } + + // Process outputs needed by a connector + // + // By the time this method is called, all dependent types should exist for a connector, + // including its direct inputs. Since each connector could select only a subset of its output + // type, this method carries over each output type as seen by the selection defined on the connector. 
+ fn process_outputs( + &self, + to_schema: &mut FederationSchema, + connector: &Connector, + parent_type_name: Name, + output_type_name: Name, + ) -> Result<(), FederationError> { + let resolvable_key = connector + .resolvable_key(self.original_schema.schema()) + .map_err(|_| FederationError::internal("error creating resolvable key"))?; + + let Some(resolvable_key) = resolvable_key else { + return self.copy_interface_object_keys(output_type_name, to_schema); + }; + + let parent_type = self.original_schema.get_type(parent_type_name)?; + let output_type = to_schema.get_type(output_type_name.clone())?; + let key_for_type = match &connector.entity_resolver { + Some(EntityResolver::Explicit) => output_type, + _ => parent_type, + }; + + let parsed = JSONSelection::parse(&resolvable_key.serialize().no_indent().to_string()) + .map_err(|e| FederationError::internal(format!("error parsing key: {e}")))?; + + let visitor = + SchemaVisitor::new(self.original_schema, to_schema, &self.directive_deny_list); + + let output_type = match &key_for_type { + TypeDefinitionPosition::Object(object) => object, + + other => { + return Err(FederationError::internal(format!( + "connector output types currently only support object types: found {}", + other.type_name() + ))) + } + }; + + // This adds child types for all key fields + visitor.walk(( + output_type.clone(), + parsed + .next_subselection() + .cloned() + .ok_or(FederationError::internal( + "empty selections are not allowed", + ))?, + ))?; + + // This actually adds the key fields if necessary, which is only + // when depending on sibling fields. 
+ if let Some(sub) = parsed.next_subselection() { + for named in sub.selections_iter() { + for field_name in named.names() { + let field_def = self + .original_schema + .schema() + .type_field(key_for_type.type_name(), field_name) + .map_err(|_| { + FederationError::internal(format!( + "field {} not found on type {}", + field_name, + key_for_type.type_name() + )) + })?; + + // TODO: future support for interfaces + let pos = ObjectFieldDefinitionPosition { + type_name: key_for_type.type_name().clone(), + field_name: Name::new(field_name)?, + }; + + if pos.get(to_schema.schema()).is_err() { + pos.insert( + to_schema, + Component::new(FieldDefinition { + description: field_def.description.clone(), + name: field_def.name.clone(), + arguments: field_def.arguments.clone(), + ty: field_def.ty.clone(), + directives: filter_directives( + &self.directive_deny_list, + &field_def.directives, + ), + }), + )?; + } + } + } + }; + + // If we have marked keys as being necessary for this output type, add them as an `@key` + // directive now. + let key_directive = Directive { + name: self.key_name.clone(), + arguments: vec![Node::new(Argument { + name: name!("fields"), + value: Node::new(Value::String( + resolvable_key.serialize().no_indent().to_string(), + )), + })], + }; + + match &key_for_type { + TypeDefinitionPosition::Object(o) => { + o.insert_directive(to_schema, Component::new(key_directive)) + } + TypeDefinitionPosition::Interface(i) => { + i.insert_directive(to_schema, Component::new(key_directive)) + } + _ => { + return Err(FederationError::internal( + "keys cannot be added to scalars, unions, enums, or input objects", + )) + } + }?; + + Ok(()) + } + + /// If the type has @interfaceObject and it doesn't have a key at this point + /// we'll need to add a key — this is a requirement for using @interfaceObject. + /// For now we'll just copy over keys from the original supergraph as resolvable: false + /// but we need to think through the implications of that. 
+ fn copy_interface_object_keys( + &self, + type_name: Name, + to_schema: &mut FederationSchema, + ) -> Result<(), FederationError> { + let Some(original_output_type) = self.original_schema.schema().get_object(&type_name) + else { + return Ok(()); + }; + + let is_interface_object = original_output_type + .directives + .iter() + .any(|d| d.name == self.interface_object_name); + + if is_interface_object { + let pos = ObjectTypeDefinitionPosition { + type_name: original_output_type.name.clone(), + }; + + for key in original_output_type + .directives + .iter() + .filter(|d| d.name == self.key_name) + { + let key_fields = key + .argument_by_name("fields", self.original_schema.schema()) + .map_err(|_| internal_error!("@key(fields:) argument missing"))?; + let key = Directive { + name: key.name.clone(), + arguments: vec![ + Node::new(Argument { + name: name!("fields"), + value: key_fields.clone(), + }), + Node::new(Argument { + name: name!("resolvable"), + value: Node::new(Value::Boolean(false)), + }), + ], + }; + pos.insert_directive(to_schema, Component::new(key))?; + } + } + + Ok(()) + } + + /// Inserts a custom leaf type into the schema + /// + /// This errors if called with a non-leaf type. 
+ fn insert_custom_leaf( + &self, + to_schema: &mut FederationSchema, + r#type: &TypeDefinitionPosition, + ) -> Result<(), FederationError> { + match r#type { + TypeDefinitionPosition::Scalar(scalar) => { + let def = scalar.get(self.original_schema.schema())?; + let def = ScalarType { + description: def.description.clone(), + name: def.name.clone(), + directives: filter_directives(&self.directive_deny_list, &def.directives), + }; + + try_pre_insert!(to_schema, scalar)?; + try_insert!(to_schema, scalar, Node::new(def)) + } + TypeDefinitionPosition::Enum(r#enum) => { + let def = r#enum.get(self.original_schema.schema())?; + let def = EnumType { + description: def.description.clone(), + name: def.name.clone(), + directives: filter_directives(&self.directive_deny_list, &def.directives), + values: def.values.clone(), + }; + + try_pre_insert!(to_schema, r#enum)?; + try_insert!(to_schema, r#enum, Node::new(def)) + } + + other => Err(FederationError::internal(format!( + "expected a leaf, found: {}", + other.type_name(), + ))), + } + } + + /// Insert a query root for a connect field + /// + /// This method will handle creating a dummy query root as shown below when the + /// parent type is _not_ a root-level Query to pass schema validation. + /// + /// ```graphql + /// type Query { + /// _: ID @shareable @inaccessible + /// } + /// ``` + /// + /// Note: This would probably be better off expanding the query to have + /// an __entities vs. adding an inaccessible field. + fn insert_query_for_field( + &self, + to_schema: &mut FederationSchema, + query_alias: &Name, + field_parent: &ObjectTypeDefinitionPosition, + field: impl AsRef, + ) -> Result<(), FederationError> { + // Prime the query type + let query = ObjectTypeDefinitionPosition { + type_name: query_alias.clone(), + }; + + // Now we'll need to know what field to add to the query root. 
In the case + // where the parent of the field on the original schema was not the root + // Query object, the field added is a dummy inaccessible field and the actual + // parent root is created and upserted. Otherwise, the query will contain the field specified. + let original = field.as_ref(); + let field = if field_parent.type_name != *query_alias { + // We'll need to upsert the actual type for the field's parent + let parent_type = field_parent.get(self.original_schema.schema())?; + + try_pre_insert!(to_schema, field_parent)?; + let field_def = FieldDefinition { + description: original.description.clone(), + name: original.name.clone(), + arguments: original.arguments.clone(), + ty: original.ty.clone(), + directives: filter_directives(&self.directive_deny_list, &original.directives), + }; + try_insert!( + to_schema, + field_parent, + Node::new(ObjectType { + description: parent_type.description.clone(), + name: parent_type.name.clone(), + implements_interfaces: parent_type.implements_interfaces.clone(), + directives: filter_directives( + &self.directive_deny_list, + &parent_type.directives, + ), + // don't insert field def here. if the type already existed + // which happens with circular references, then this definition + // won't be used. 
+ fields: Default::default() + }) + )?; + + let pos = ObjectFieldDefinitionPosition { + type_name: parent_type.name.clone(), + field_name: field_def.name.clone(), + }; + + pos.insert(to_schema, field_def.into())?; + + // Return the dummy field to add to the root Query + FieldDefinition { + description: None, + name: name!("_"), + arguments: Vec::new(), + ty: ast::Type::Named(ast::NamedType::new("ID")?), + directives: ast::DirectiveList(vec![Node::new(Directive { + name: name!("federation__inaccessible"), + arguments: Vec::new(), + })]), + } + } else { + FieldDefinition { + description: original.description.clone(), + name: original.name.clone(), + arguments: original.arguments.clone(), + ty: original.ty.clone(), + directives: filter_directives(&self.directive_deny_list, &original.directives), + } + }; + + // Insert the root Query + // Note: This should error if Query is already defined, as it shouldn't be + query.pre_insert(to_schema)?; + query.insert( + to_schema, + Node::new(ObjectType { + description: None, + name: query_alias.clone(), + implements_interfaces: IndexSet::with_hasher(Default::default()), + directives: DirectiveList::new(), + fields: IndexMap::from_iter([(field.name.clone(), Component::new(field))]), + }), + )?; + + Ok(()) + } + } +} + +#[cfg(test)] +mod tests; diff --git a/apollo-federation/src/sources/connect/expand/snapshots/apollo_federation__sources__connect__expand__carryover__tests__carryover.snap b/apollo-federation/src/sources/connect/expand/snapshots/apollo_federation__sources__connect__expand__carryover__tests__carryover.snap new file mode 100644 index 0000000000..d8ede375a8 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/snapshots/apollo_federation__sources__connect__expand__carryover__tests__carryover.snap @@ -0,0 +1,81 @@ +--- +source: apollo-federation/src/sources/connect/expand/carryover.rs +expression: schema.schema().serialize().to_string() +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: 
"https://specs.apollo.dev/join/v0.3", for: EXECUTION) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @link(url: "https://specs.apollo.dev/tag/v0.3") @link(url: "https://specs.apollo.dev/authenticated/v0.1", for: SECURITY) @link(url: "https://specs.apollo.dev/requiresScopes/v0.1", for: SECURITY) @link(url: "https://specs.apollo.dev/policy/v0.1", for: SECURITY) @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom1", "@custom2", {name: "@originalName", as: "@custom3"}]) @link(url: "http://bugfix/weird/v1.0", import: ["@weird"]) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @tag(name: String!) 
repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @requiresScopes(scopes: [[requiresScopes__Scope!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @policy(policies: [[policy__Policy!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @custom1 on OBJECT | FIELD_DEFINITION + +directive @custom2 on OBJECT | FIELD_DEFINITION + +directive @custom3 on OBJECT | FIELD_DEFINITION + +directive @weird on FIELD | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE @join__graph(name: "one", url: "none") + TWO @join__graph(name: "two", url: "none") +} + +type Query @join__type(graph: ONE) @join__type(graph: TWO) { + tagged: String @join__field(graph: ONE, type: "String") @tag(name: "tag") + hidden: String @join__field(graph: ONE, type: "String") @inaccessible + custom: T @join__field(graph: ONE, type: "T") @custom1 + authenticated: String @join__field(graph: ONE, type: "String") @authenticated + requiresScopes: String @join__field(graph: ONE, type: "String") @requiresScopes(scopes: ["scope"]) + policy: String @join__field(graph: ONE, type: "String") @policy(policies: [["admin"]]) + overridden: String @join__field(graph: ONE, override: "two", overrideLabel: "label", type: "String") @join__field(graph: TWO, type: "String") + weird: String @join__field(graph: ONE, type: "String") @weird + customAgain: String @join__field(graph: TWO, type: "String") @custom1 +} + +type T @join__type(graph: ONE) @custom2 { + field: String @join__field(graph: 
ONE, type: "String") @custom3 +} + +scalar requiresScopes__Scope + +scalar policy__Policy diff --git a/apollo-federation/src/sources/connect/expand/tests/mod.rs b/apollo-federation/src/sources/connect/expand/tests/mod.rs new file mode 100644 index 0000000000..9987141d35 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/mod.rs @@ -0,0 +1,42 @@ +use std::fs::read_to_string; + +use insta::assert_debug_snapshot; +use insta::assert_snapshot; +use insta::glob; + +use crate::sources::connect::expand::expand_connectors; +use crate::sources::connect::expand::ExpansionResult; +use crate::ApiSchemaOptions; + +#[test] +fn it_expand_supergraph() { + insta::with_settings!({prepend_module_to_snapshot => false}, { + glob!("schemas/expand", "*.graphql", |path| { + let to_expand = read_to_string(path).unwrap(); + let ExpansionResult::Expanded { + raw_sdl, + api_schema, + connectors, + } = expand_connectors(&to_expand, &ApiSchemaOptions { include_defer: true, ..Default::default() }).unwrap() + else { + panic!("expected expansion to actually expand subgraphs for {path:?}"); + }; + + assert_snapshot!(api_schema); + assert_debug_snapshot!(connectors.by_service_name); + assert_snapshot!(raw_sdl); + }); + }); +} + +#[test] +fn it_ignores_supergraph() { + insta::with_settings!({prepend_module_to_snapshot => false}, { + glob!("schemas/ignore", "*.graphql", |path| { + let to_ignore = read_to_string(path).unwrap(); + let ExpansionResult::Unchanged = expand_connectors(&to_ignore, &ApiSchemaOptions::default()).unwrap() else { + panic!("expected expansion to ignore non-connector supergraph for {path:?}"); + }; + }); + }); +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql new file mode 100644 index 0000000000..94d0229f0a --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql @@ -0,0 +1,108 @@ 
+schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/tag/v0.3") + @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) + @link(url: "https://specs.apollo.dev/authenticated/v0.1", for: SECURITY) + @link(url: "https://specs.apollo.dev/requiresScopes/v0.1", for: SECURITY) + @link(url: "https://specs.apollo.dev/policy/v0.1", for: SECURITY) + @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [ONE], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [ONE], name: "source", args: {name: "json", http: {baseURL: "http://example/"}}) +{ + query: Query +} + +directive @authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @custom on OBJECT | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! 
= false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @policy(policies: [[policy__Policy!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @requiresScopes(scopes: [[requiresScopes__Scope!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @tag(name: String!) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + ONE @join__graph(name: "one", url: "none") + TWO @join__graph(name: "two", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +scalar policy__Policy + +type Query + @join__type(graph: ONE) + @join__type(graph: TWO) +{ + ts: [T] @join__field(graph: ONE) @join__directive(graphs: [ONE], name: "connect", args: {source: "json", http: {GET: "/t"}, selection: "id\ntagged\nhidden\ncustom\nauthenticated\nrequiresScopes\npolicy\noverridden"}) + t(id: ID): T @join__field(graph: ONE) @join__directive(graphs: [ONE], name: "connect", args: {source: "json", http: {GET: "/t/{$args.id}"}, selection: "id\ntagged\nhidden\ncustom\nauthenticated\nrequiresScopes\npolicy\noverridden", entity: true}) +} + +type R + @join__type(graph: ONE) +{ + id: ID! 
+} + +scalar requiresScopes__Scope + +type T + @join__type(graph: ONE, key: "id") + @join__type(graph: TWO, key: "id") +{ + id: ID! + tagged: String @join__field(graph: ONE) @tag(name: "tag") + hidden: String @inaccessible @join__field(graph: ONE) + custom: String @join__field(graph: ONE) @custom + authenticated: String @join__field(graph: ONE) @authenticated + requiresScopes: String @join__field(graph: ONE) @requiresScopes(scopes: ["scope"]) + policy: String @join__field(graph: ONE) @policy(policies: [["admin"]]) + overridden: String @join__field(graph: ONE, override: "two", overrideLabel: "label") @join__field(graph: TWO, overrideLabel: "label") + r: R @join__field(graph: ONE) @join__directive(graphs: [ONE], name: "connect", args: {source: "json", http: {GET: "/t/{$this.id}/r"}, selection: "id"}) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.yaml new file mode 100644 index 0000000000..97a6c6e900 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.yaml @@ -0,0 +1,79 @@ +subgraphs: + one: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: [ + "@key", + "@inaccessible", "@tag", "@override", + "@authenticated", "@requiresScopes", "@policy", + "@composeDirective" + ] + ) + @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @composeDirective(name: "@custom") + @source(name: "json" http: { baseURL: "http://example/" }) + directive @custom on OBJECT | FIELD_DEFINITION + type Query { + ts: [T] @connect( + source: "json" + http: { GET: "/t" } + selection: """ + id + tagged + hidden + custom + authenticated + requiresScopes + policy + overridden + """ + ) + t(id: ID): T @connect( + source: "json" + http: { GET: "/t/{$$args.id}" 
} + selection: """ + id + tagged + hidden + custom + authenticated + requiresScopes + policy + overridden + """ + entity: true + ) + } + + type T @key(fields: "id") { + id: ID! + tagged: String @tag(name: "tag") + hidden: String @inaccessible + custom: String @custom + authenticated: String @authenticated + requiresScopes: String @requiresScopes(scopes: ["scope"]) + policy: String @policy(policies: [["admin"]]) + overridden: String @override(from: "two", label: "label") + r: R @connect( + source: "json" + http: { GET: "/t/{$$this.id}/r" } + selection: "id" + ) + } + + type R { + id: ID! + } + two: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + type T @key(fields: "id") { + id: ID! + overridden: String + } \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql new file mode 100644 index 0000000000..a7364124c6 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql @@ -0,0 +1,97 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "http://localhost:4001"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +interface Itf + @join__type(graph: CONNECTORS, key: "id", isInterfaceObject: true) + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + c: Int! @join__field(graph: CONNECTORS) + d: Int! @join__field(graph: CONNECTORS) + e: String @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs/{$this.id}/e"}, selection: "$"}) +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) + @join__type(graph: GRAPHQL) +{ + itfs: [Itf] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs"}, selection: "id c"}) + itf(id: ID!): Itf @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs/{$args.id}"}, selection: "id c d", entity: true}) +} + +type T1 implements Itf + @join__implements(graph: GRAPHQL, interface: "Itf") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + a: String + c: Int! @join__field + d: Int! @join__field + e: String @join__field +} + +type T2 implements Itf + @join__implements(graph: GRAPHQL, interface: "Itf") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + b: String + c: Int! @join__field + d: Int! @join__field + e: String @join__field +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.yaml new file mode 100644 index 0000000000..8e91569443 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.yaml @@ -0,0 +1,59 @@ +# requires federation_version: =2.10.0-preview.3 # NOTE: unreleased at time of writing +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key", "@interfaceObject"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "json", http: { baseURL: "http://localhost:4001" }) + + type Query { + itfs: [Itf] + @connect( + source: "json" + http: { GET: "/itfs" } + selection: "id c" + ) + + itf(id: ID!): Itf + @connect( + source: "json" + http: { GET: "/itfs/{$$args.id}" } + selection: "id c d" + entity: true + ) + } + + type Itf @key(fields: "id") @interfaceObject { + id: ID! 
+ c: Int! + d: Int! + e: String + @connect( + source: "json" + http: { GET: "/itfs/{$$this.id}/e" } + selection: "$" + ) + } + graphql: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + + interface Itf @key(fields: "id") { + id: ID! + } + + type T1 implements Itf @key(fields: "id") { + id: ID! + a: String + } + + type T2 implements Itf @key(fields: "id") { + id: ID! + b: String + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql new file mode 100644 index 0000000000..c4928c19cb --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql @@ -0,0 +1,88 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [ONE], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! 
= true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + ONE @join__graph(name: "one", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: ONE) +{ + t(id: ID!): T @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/ts/{$args.id}"}, selection: "id id2 unselected", entity: true}) + t2(id: ID!, id2: ID!): T @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/ts/{$args.id}?id2={$args.id2}"}, selection: "id id2 unselected", entity: true}) + + """ Uses the `unselected` field as a key, but doesn't select it """ + unselected(unselected: ID!): T @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/ts/{$args.unselected}"}, selection: "id id2 accessibleByUnselected", entity: true}) +} + +type R + @join__type(graph: ONE) +{ + id: ID! + id2: ID! +} + +type T + @join__type(graph: ONE, key: "id") + @join__type(graph: ONE, key: "id id2") + @join__type(graph: ONE, key: "unselected") +{ + id: ID! + id2: ID! + unselected: ID! + accessibleByUnselected: ID! 
+ r1: R @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/rs/{$this.id}"}, selection: "id id2"}) + r2: R @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/rs/{$this.id}?id2={$this.id2}"}, selection: "id id2"}) + r3: R @join__directive(graphs: [ONE], name: "connect", args: {http: {GET: "http://localhost/rs/{$this.id}"}, selection: "id id2: $this.id2"}) + r4: R @join__directive(graphs: [ONE], name: "connect", args: {http: {POST: "http://localhost/rs", body: "id: $this.id"}, selection: "id id2"}) + r5: R @join__directive(graphs: [ONE], name: "connect", args: {http: {POST: "http://localhost/rs", body: "id: $this.id"}, selection: "id id2: $this.id2"}) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.yaml new file mode 100644 index 0000000000..aa80b407e4 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.yaml @@ -0,0 +1,44 @@ +subgraphs: + one: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + type Query { + t(id: ID!): T + @connect( # expect `key: "id"` + http: { GET: "http://localhost/ts/{$$args.id}" } + selection: "id id2 unselected" + entity: true + ) + t2(id: ID!, id2: ID!): T + @connect( # expect `key: "id id2"` + http: { GET: "http://localhost/ts/{$$args.id}?id2={$$args.id2}" } + selection: "id id2 unselected" + entity: true + ) + """ Uses the `unselected` field as a key, but doesn't select it """ + unselected(unselected: ID!): T + @connect( + http: { GET: "http://localhost/ts/{$$args.unselected}" } + selection: "id id2 accessibleByUnselected" + entity: true + ) + } + type T @key(fields: "id") @key(fields: "id id2") @key(fields: "unselected") { + id: ID! + id2: ID! + unselected: ID! 
+ accessibleByUnselected: ID! + r1: R @connect(http: { GET: "http://localhost/rs/{$$this.id}" }, selection: "id id2") # expect `key: "id"` + r2: R @connect(http: { GET: "http://localhost/rs/{$$this.id}?id2={$$this.id2}" }, selection: "id id2") # expect `key: "id id2"` + r3: R @connect(http: { GET: "http://localhost/rs/{$$this.id}" }, selection: "id id2: $$this.id2") # expect `key: "id id2"` + r4: R @connect(http: { POST: "http://localhost/rs" body: "id: $$this.id" }, selection: "id id2") # expect `key: "id"` + r5: R @connect(http: { POST: "http://localhost/rs" body: "id: $$this.id" }, selection: "id id2: $$this.id2") # expect `key: "id id2"` + } + type R { + id: ID! + id2: ID! + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql new file mode 100644 index 0000000000..192ab5c497 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql @@ -0,0 +1,75 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "example", http: {baseURL: "http://example"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input BazInput + @join__type(graph: CONNECTORS) +{ + buzz: String + quux: QuuxInput +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + foo(bar: String, baz: BazInput, doubleBaz: BazInput): String @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/{$args.bar}/{$args.doubleBaz.buzz}/{$args.baz.quux.quaz}"}, selection: "$"}) +} + +input QuuxInput + @join__type(graph: CONNECTORS) +{ + quaz: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.yaml new file mode 100644 index 0000000000..5d0ca4fa09 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.yaml @@ -0,0 +1,29 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "example", http: { baseURL: "http://example" }) + + type Query { + foo(bar: String, baz: BazInput, doubleBaz: BazInput): String @connect( + source: "example", + http: { GET: "/{$$args.bar}/{$$args.doubleBaz.buzz}/{$$args.baz.quux.quaz}" } + selection: "$" + ) + } + + input BazInput { + buzz: String + quux: QuuxInput + } + + input QuuxInput { + quaz: String + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql new file mode 100644 index 0000000000..346fd6e0c5 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql @@ -0,0 +1,71 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS_SUBGRAPH], name: 
"link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS_SUBGRAPH], name: "source", args: {name: "example", http: {baseURL: "http://example"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS_SUBGRAPH @join__graph(name: "connectors-subgraph", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS_SUBGRAPH) +{ + users: [User] @join__directive(graphs: [CONNECTORS_SUBGRAPH], name: "connect", args: {source: "example", http: {GET: "/"}, selection: "id a"}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS_SUBGRAPH], name: "connect", args: {source: "example", http: {GET: "/{$args.id}"}, selection: "id a b", entity: true}) +} + +type User + @join__type(graph: CONNECTORS_SUBGRAPH, key: "id") +{ + id: ID! + a: String + b: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.yaml new file mode 100644 index 0000000000..c0f92868e0 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.yaml @@ -0,0 +1,25 @@ +subgraphs: + connectors-subgraph: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key", "@external", "@requires"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "example", http: { baseURL: "http://example" }) + + type Query { + users: [User] @connect(source: "example", http: { GET: "/" }, selection: "id a") + + user(id: ID!): User + @connect(source: "example", http: { GET: "/{$$args.id}" }, selection: "id a b", entity: true) + } + + type User @key(fields: "id") { + id: ID! 
+ a: String + b: String + } \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql new file mode 100644 index 0000000000..888fbb3ebc --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql @@ -0,0 +1,154 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "example", http: {baseURL: "http://example"}}) +{ + query: Query + mutation: Mutation +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type Address + @join__type(graph: CONNECTORS) +{ + street: String + suite: String + city: String + zipcode: String + geo: AddressGeo +} + +type AddressGeo + @join__type(graph: CONNECTORS) +{ + lat: Float + lng: Float +} + +input AddressGeoInput + @join__type(graph: CONNECTORS) +{ + lat: Float + lng: Float +} + +input AddressInput + @join__type(graph: CONNECTORS) +{ + street: String + suite: String + city: String + zipcode: String + geo: AddressGeoInput +} + +type CompanyInfo + @join__type(graph: CONNECTORS) +{ + name: String + catchPhrase: String + bs: String + email: EmailAddress +} + +input CompanyInput + @join__type(graph: CONNECTORS) +{ + name: String! + catchPhrase: String +} + +input CreateUserInput + @join__type(graph: CONNECTORS) +{ + name: String! + username: String! + email: EmailAddress! + status: Status! + address: AddressInput +} + +scalar EmailAddress + @join__type(graph: CONNECTORS) + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Mutation + @join__type(graph: CONNECTORS) +{ + createUser(input: CreateUserInput!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {POST: "/create/user", body: "$args.input { name username email status address { street suite city zipcode geo { lat lng } } }"}, selection: "id"}) +} + +type Query + @join__type(graph: CONNECTORS) +{ + filterUsersByEmailDomain(email: EmailAddress!): [User] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/filter/users", body: "emailDomain: $args.email"}, selection: "id\nname"}) + usersByCompany(company: CompanyInput!): [User] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/by-company/{$args.company.name}"}, selection: "id\nname\ncompany {\n name\n catchPhrase\n bs\n}"}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/{$args.id}"}, selection: "id\nname\nusername\nemail\naddress {\n street\n suite\n city\n zipcode\n geo {\n lat\n lng\n }\n}\nphone\nwebsite\ncompany {\n name\n catchPhrase\n bs\n email\n}", entity: true}) +} + +enum Status + @join__type(graph: CONNECTORS) +{ + ACTIVE @join__enumValue(graph: CONNECTORS) + INACTIVE @join__enumValue(graph: CONNECTORS) +} + +type User + @join__type(graph: CONNECTORS, key: "id") +{ + id: ID! 
+ name: String + username: String + email: EmailAddress + address: Address + phone: String + website: String + company: CompanyInfo +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.yaml new file mode 100644 index 0000000000..62d16264a9 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.yaml @@ -0,0 +1,124 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.10" + import: ["@key"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "example", http: { baseURL: "http://example" }) + + type Query { + filterUsersByEmailDomain(email: EmailAddress!): [User] + @connect(source: "example", http: { GET: "/filter/users", body: "emailDomain: $$args.email" }, selection: """ + id + name + """) + + usersByCompany(company: CompanyInput!): [User] + @connect(source: "example", http: { GET: "/by-company/{$$args.company.name}" }, selection: """ + id + name + company { + name + catchPhrase + bs + }""") + + user(id: ID!): User + @connect(source: "example", http: { GET: "/{$$args.id}" }, selection: """ + id + name + username + email + address { + street + suite + city + zipcode + geo { + lat + lng + } + } + phone + website + company { + name + catchPhrase + bs + email + }""", entity: true) + } + + type User @key(fields: "id") { + id: ID! + name: String + username: String + email: EmailAddress + address: Address + phone: String + website: String + company: CompanyInfo + } + + type Address { + street: String + suite: String + city: String + zipcode: String + geo: AddressGeo + } + + type AddressGeo { + lat: Float + lng: Float + } + + type CompanyInfo { + name: String + catchPhrase: String + bs: String + email: EmailAddress + } + + input CompanyInput { + name: String! 
+ catchPhrase: String + } + + scalar EmailAddress + + enum Status { + ACTIVE + INACTIVE + } + + type Mutation { + createUser(input: CreateUserInput!): User + @connect(source: "example", http: { POST: "/create/user", body: "$$args.input { name username email status address { street suite city zipcode geo { lat lng } } }" }, selection: "id") + } + + input CreateUserInput { + name: String! + username: String! + email: EmailAddress! + status: Status! + address: AddressInput + } + + input AddressInput { + street: String + suite: String + city: String + zipcode: String + geo: AddressGeoInput + } + + input AddressGeoInput { + lat: Float + lng: Float + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql new file mode 100644 index 0000000000..9acf89e7a2 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql @@ -0,0 +1,75 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "v1", http: {baseURL: "https://rt-airlock-services-listing.herokuapp.com"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +type K + @join__type(graph: CONNECTORS) +{ + id: ID! +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + f: T @join__directive(graphs: [CONNECTORS], name: "connect", args: {http: {GET: "https://my.api/t"}, selection: "k { id }"}) +} + +type T + @join__type(graph: CONNECTORS) +{ + k: K + b: String @join__directive(graphs: [CONNECTORS], name: "connect", args: {http: {GET: "https://my.api/t/{$this.k.id}"}, selection: "b"}) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.yaml new file mode 100644 index 0000000000..6657921a32 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.yaml @@ -0,0 +1,35 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "v1" + http: { baseURL: "https://rt-airlock-services-listing.herokuapp.com" } + ) + + type T { + k: K + b: String + @connect(http: { GET: "https://my.api/t/{$$this.k.id}" }, selection: "b") + } + + type K { + id: ID! 
+ } + + type Query { + f: T + @connect( + http: { GET: "https://my.api/t" } + selection: """ + k { id } + """ + ) + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql new file mode 100644 index 0000000000..aa26ff6f2f --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql @@ -0,0 +1,76 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "example", http: {baseURL: "http://example"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://graphql") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) + @join__type(graph: GRAPHQL) +{ + users: [User] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/"}, selection: "id a"}) + user(id: ID!): User @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/{$args.id}"}, selection: "id a b", entity: true}) +} + +type User + @join__type(graph: CONNECTORS, key: "id") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! 
+ a: String @join__field(graph: CONNECTORS) + b: String @join__field(graph: CONNECTORS) + c: String @join__field(graph: CONNECTORS, external: true) @join__field(graph: GRAPHQL) + d: String @join__field(graph: CONNECTORS, requires: "c") @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/{$this.c}/d", body: "with_b: $this.b"}, selection: "$"}) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.yaml new file mode 100644 index 0000000000..c29b7fe604 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.yaml @@ -0,0 +1,41 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key", "@external", "@requires"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "example", http: { baseURL: "http://example" }) + + type Query { + users: [User] @connect(source: "example", http: { GET: "/" }, selection: "id a") + + user(id: ID!): User + @connect(source: "example", http: { GET: "/{$$args.id}" }, selection: "id a b", entity: true) + } + + type User @key(fields: "id") { + id: ID! + a: String + b: String + c: String @external + d: String + @requires(fields: "c") + @connect(source: "example", http: { GET: "/{$$this.c}/d", body: "with_b: $$this.b" }, selection: "$") + } + + graphql: + routing_url: https://graphql + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.7", import: ["@key"]) + + type User @key(fields: "id") { + id: ID! 
+ c: String + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql new file mode 100644 index 0000000000..462a345fbd --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql @@ -0,0 +1,79 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://localhost:4001") +} + +scalar JSON + @join__type(graph: CONNECTORS) + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) + @join__type(graph: GRAPHQL) +{ + users: [User] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users"}, selection: "id name"}) + user(id: ID!): User @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$args.id}"}, selection: "id\nname\nusername", entity: true}) +} + +type User + @join__type(graph: CONNECTORS, key: "id") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! 
+ name: String @join__field(graph: CONNECTORS) + username: String @join__field(graph: CONNECTORS) + c: String @join__field(graph: CONNECTORS, external: true) @join__field(graph: GRAPHQL) + d: String @join__field(graph: CONNECTORS, requires: "c") @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$this.c}"}, selection: "$.phone"}) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.yaml new file mode 100644 index 0000000000..25c07a8b40 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.yaml @@ -0,0 +1,66 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.10" + import: ["@key", "@external", "@requires", "@shareable"] + ) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { baseURL: "https://jsonplaceholder.typicode.com/" } + ) + + type Query { + users: [User] + @connect(source: "json", http: { GET: "/users" }, selection: "id name") + + user(id: ID!): User + @connect( + source: "json" + http: { GET: "/users/{$$args.id}" } + selection: """ + id + name + username + """ + entity: true + ) + } + + type User @key(fields: "id") { + id: ID! + name: String + username: String + c: String @external + d: String + @requires(fields: "c") + @connect( + source: "json" + http: { GET: "/users/{$$this.c}" } + selection: "$.phone" + ) + } + + scalar JSON + + graphql: + routing_url: https://localhost:4001 + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key"] + ) + + type User @key(fields: "id") { + id: ID! 
+ c: String + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql new file mode 100644 index 0000000000..1658ad5162 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql @@ -0,0 +1,81 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "example", http: {baseURL: "http://example"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type A + @join__type(graph: CONNECTORS) +{ + id: ID +} + +type B + @join__type(graph: CONNECTORS) +{ + a: A +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + ts: [T] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "example", http: {GET: "/"}, selection: "a { id } b { a { id } }"}) +} + +type T + @join__type(graph: CONNECTORS) +{ + a: A + b: B +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.yaml new file mode 100644 index 0000000000..379b183d97 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.yaml @@ -0,0 +1,29 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key", "@external", "@requires"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "example", http: { baseURL: "http://example" }) + + type Query { + ts: [T] @connect(source: "example", http: { GET: "/" }, selection: "a { id } b { a { id } }") + } + + type T { + a: A + b: B + } + + type A { + id: ID + } + + type B { + a: A + } diff --git 
a/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.graphql new file mode 100644 index 0000000000..71be26422a --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.graphql @@ -0,0 +1,105 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/tag/v0.3") + @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) + @link(url: "https://specs.apollo.dev/authenticated/v0.1", for: SECURITY) + @link(url: "https://specs.apollo.dev/requiresScopes/v0.1", for: SECURITY) + @link(url: "https://specs.apollo.dev/policy/v0.1", for: SECURITY) + @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom1", "@custom2", {name: "@originalName", as: "@custom3"}]) + @link(url: "http://bugfix/weird/v1.0", import: ["@weird"]) +{ + query: Query +} + +directive @authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @custom1 on OBJECT | FIELD_DEFINITION + +directive @custom2 on OBJECT | FIELD_DEFINITION + +directive @custom3 on OBJECT | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @policy(policies: [[policy__Policy!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @requiresScopes(scopes: [[requiresScopes__Scope!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @tag(name: String!) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @weird on FIELD | FIELD_DEFINITION + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + ONE @join__graph(name: "one", url: "none") + TWO @join__graph(name: "two", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +scalar policy__Policy + +type Query + @join__type(graph: ONE) + @join__type(graph: TWO) +{ + tagged: String @join__field(graph: ONE) @tag(name: "tag") + hidden: String @inaccessible @join__field(graph: ONE) + custom: T @join__field(graph: ONE) @custom1 + authenticated: String @join__field(graph: ONE) @authenticated + requiresScopes: String @join__field(graph: ONE) @requiresScopes(scopes: ["scope"]) + policy: String @join__field(graph: ONE) @policy(policies: [["admin"]]) + overridden: String @join__field(graph: ONE, override: "two", overrideLabel: "label") @join__field(graph: TWO, overrideLabel: "label") + weird: String @join__field(graph: ONE) @weird + customAgain: String @join__field(graph: TWO) @custom1 +} + +scalar requiresScopes__Scope + +type T + @join__type(graph: ONE) + @custom2 +{ + field: String @custom3 +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.yaml new file mode 100644 index 0000000000..4f2bfeea70 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/directives.yaml @@ -0,0 +1,69 @@ +subgraphs: + one: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: [ + "@override", "@inaccessible", "@tag", + "@authenticated", "@requiresScopes", "@policy", + "@composeDirective" + ] + ) + @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom1", "@custom2", { name: "@originalName", as: "@custom3" }]) + @composeDirective(name: "@custom1") + @composeDirective(name: "@custom2") + @composeDirective(name: "@custom3") + directive @custom1 on OBJECT | FIELD_DEFINITION + directive @custom2 on OBJECT | FIELD_DEFINITION + directive @custom3 on OBJECT | FIELD_DEFINITION + type Query { + tagged: String @tag(name: "tag") + hidden: String @inaccessible + custom: T @custom1 + authenticated: String @authenticated + 
requiresScopes: String @requiresScopes(scopes: ["scope"]) + policy: String @policy(policies: [["admin"]]) + overridden: String @override(from: "two", label: "label") + } + + type T @custom2 { + field: String @custom3 + } + + # bug fix: if a customer tries to define @defer this way, it should be ignored + extend schema + @link(url: "http://bugfix/namespace/v1.0", import: ["@defer"]) + + directive @defer(label: String) on FIELD + + # bug fix: don't redefine scalars if the user defines them for some reason + scalar federation__RequireScopes + scalar federation__Policy + + # bug fix: here's a weird directive that's both executable and type system + extend schema + @link(url: "http://bugfix/weird/v1.0", import: ["@weird"]) + @composeDirective(name: "@weird") + + directive @weird on FIELD | FIELD_DEFINITION + + extend type Query { + weird: String @weird + } + two: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@composeDirective"]) + @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom1", "@custom2", { name: "@originalName", as: "@custom3" }]) + @composeDirective(name: "@custom1") + + directive @custom1 on OBJECT | FIELD_DEFINITION + directive @custom2 on OBJECT | FIELD_DEFINITION + directive @custom3 on OBJECT | FIELD_DEFINITION + type Query { + overridden: String + customAgain: String @custom1 + } \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.graphql b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.graphql new file mode 100644 index 0000000000..9424c35ed9 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.graphql @@ -0,0 +1,57 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.4", for: EXECUTION) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, 
args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +scalar join__DirectiveArguments + +scalar join__FieldSet + +enum join__Graph { + GRAPHQL @join__graph(name: "graphql", url: "https://graphql") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: GRAPHQL) +{ + users: [User] +} + +type User + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! 
+ c: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.yaml b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.yaml new file mode 100644 index 0000000000..b51befad9d --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/ignore/ignored.yaml @@ -0,0 +1,16 @@ +subgraphs: + graphql: + routing_url: https://graphql + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.7", import: ["@key"]) + + type User @key(fields: "id") { + id: ID! + c: String + } + + type Query { + users: [User] + } diff --git a/apollo-federation/src/sources/connect/expand/tests/schemas/regenerate.sh b/apollo-federation/src/sources/connect/expand/tests/schemas/regenerate.sh new file mode 100755 index 0000000000..8a569a3331 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/schemas/regenerate.sh @@ -0,0 +1,26 @@ +# Composes a single supergraph config file passed as an argument or all `.yaml` files in any subdirectories. +# For each supergraph config, outputs a `.graphql` file in the same directory. 
+# Optionally, you can set `FEDERATION_VERSION` to override the supergraph binary used +set -euo pipefail + +if [ -z "${FEDERATION_VERSION:-}" ]; then + FEDERATION_VERSION="2.10.0-preview.2" +fi + +regenerate_graphql() { + local supergraph_config=$1 + local test_name + test_name=$(basename "$supergraph_config" .yaml) + local dir_name + dir_name=$(dirname "$supergraph_config") + echo "Regenerating $dir_name/$test_name.graphql" + rover supergraph compose --federation-version "=$FEDERATION_VERSION" --config "$supergraph_config" > "$dir_name/$test_name.graphql" +} + +if [ -z "${1:-}" ]; then + for supergraph_config in */*.yaml; do + regenerate_graphql "$supergraph_config" + done +else + regenerate_graphql "$1" +fi \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-2.snap new file mode 100644 index 0000000000..83cf15a887 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-2.snap @@ -0,0 +1,524 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql +--- +{ + "one_Query_ts_0": Connector { + id: ConnectId { + label: "one.json http: GET /t", + subgraph_name: "one", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.ts), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + 
StringTemplate { + parts: [ + Constant( + Constant { + value: "t", + location: 1..2, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "tagged", + ), + range: Some( + 3..9, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "hidden", + ), + range: Some( + 10..16, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "custom", + ), + range: Some( + 17..23, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "authenticated", + ), + range: Some( + 24..37, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "requiresScopes", + ), + range: Some( + 38..52, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "policy", + ), + range: Some( + 53..59, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "overridden", + ), + range: Some( + 60..70, + ), + }, + None, + ), + ], + range: Some( + 0..70, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + "one_Query_t_0": Connector { + id: ConnectId { + label: "one.json http: GET /t/{$args.id}", + subgraph_name: "one", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.t), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "t", + location: 
1..2, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 4..12, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "tagged", + ), + range: Some( + 3..9, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "hidden", + ), + range: Some( + 10..16, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "custom", + ), + range: Some( + 17..23, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "authenticated", + ), + range: Some( + 24..37, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "requiresScopes", + ), + range: Some( + 38..52, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "policy", + ), + range: Some( + 53..59, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "overridden", + ), + range: Some( + 60..70, + ), + }, + None, + ), + ], + range: Some( + 0..70, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "one_T_r_0": Connector { + id: ConnectId { + label: "one.json http: GET /t/{$this.id}/r", + subgraph_name: "one", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r), + directive_name: "connect", + 
directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "t", + location: 1..2, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 4..12, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "r", + location: 14..15, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + ], + range: Some( + 0..2, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-3.snap new file mode 100644 index 0000000000..3e74c1b284 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql-3.snap @@ -0,0 +1,84 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl 
+input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) @link(url: "https://specs.apollo.dev/tag/v0.3") @link(url: "https://specs.apollo.dev/authenticated/v0.1", for: SECURITY) @link(url: "https://specs.apollo.dev/requiresScopes/v0.1", for: SECURITY) @link(url: "https://specs.apollo.dev/policy/v0.1", for: SECURITY) @link(url: "http://specs.example.org/custom/v0.1", import: ["@custom"]) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +directive @tag(name: String!) repeatable on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION | SCHEMA + +directive @authenticated on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @requiresScopes(scopes: [[requiresScopes__Scope!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @policy(policies: [[policy__Policy!]!]!) on FIELD_DEFINITION | OBJECT | INTERFACE | SCALAR | ENUM + +directive @custom on OBJECT | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE_QUERY_T_0 @join__graph(name: "one_Query_t_0", url: "none") + ONE_QUERY_TS_0 @join__graph(name: "one_Query_ts_0", url: "none") + ONE_T_R_0 @join__graph(name: "one_T_r_0", url: "none") + TWO @join__graph(name: "two", url: "none") +} + +type T @join__type(graph: ONE_QUERY_T_0, key: "id") @join__type(graph: ONE_QUERY_TS_0) @join__type(graph: ONE_T_R_0, key: "id") @join__type(graph: TWO, key: "id") { + authenticated: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @authenticated + custom: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @custom + hidden: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @inaccessible + id: ID! 
@join__field(graph: ONE_QUERY_T_0, type: "ID!") @join__field(graph: ONE_QUERY_TS_0, type: "ID!") @join__field(graph: ONE_T_R_0, type: "ID!") @join__field(graph: TWO, type: "ID!") + overridden: String @join__field(graph: ONE_QUERY_T_0, override: "two", overrideLabel: "label", type: "String") @join__field(graph: ONE_QUERY_TS_0, override: "two", overrideLabel: "label", type: "String") @join__field(graph: TWO, type: "String") + policy: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @policy(policies: [["admin"]]) + requiresScopes: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @requiresScopes(scopes: ["scope"]) + tagged: String @join__field(graph: ONE_QUERY_T_0, type: "String") @join__field(graph: ONE_QUERY_TS_0, type: "String") @tag(name: "tag") + r: R @join__field(graph: ONE_T_R_0, type: "R") +} + +type Query @join__type(graph: ONE_QUERY_T_0) @join__type(graph: ONE_QUERY_TS_0) @join__type(graph: ONE_T_R_0) @join__type(graph: TWO) { + t(id: ID): T @join__field(graph: ONE_QUERY_T_0, type: "T") + ts: [T] @join__field(graph: ONE_QUERY_TS_0, type: "[T]") + _: ID @inaccessible @join__field(graph: ONE_T_R_0, type: "ID") +} + +type R @join__type(graph: ONE_T_R_0) { + id: ID! 
@join__field(graph: ONE_T_R_0, type: "ID!") +} + +scalar requiresScopes__Scope + +scalar policy__Policy diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql.snap new file mode 100644 index 0000000000..93c1cd5a8b --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@carryover.graphql.snap @@ -0,0 +1,26 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/carryover.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Query { + ts: [T] + t(id: ID): T +} + +type R { + id: ID! +} + +type T { + id: ID! + tagged: String + custom: String + authenticated: String + requiresScopes: String + policy: String + overridden: String + r: R +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-2.snap new file mode 100644 index 0000000000..28d0ba77f0 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-2.snap @@ -0,0 +1,184 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/circular.graphql +--- +{ + "one_Query_t_0": Connector { + id: ConnectId { + label: "one.json http: GET /t/{$args.id}", + subgraph_name: "one", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.t), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + 
source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + Component { + parts: [ + Text( + "t", + ), + ], + }, + Component { + parts: [ + Var( + Variable { + var_type: Args, + path: "id", + location: 4..12, + }, + ), + ], + }, + ], + query: {}, + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + ], + range: Some( + 0..2, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + }, + "one_T_ts_0": Connector { + id: ConnectId { + label: "one.json http: GET /t/{$this.id}/ts", + subgraph_name: "one", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.ts), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + Component { + parts: [ + Text( + "t", + ), + ], + }, + Component { + parts: [ + Var( + Variable { + var_type: This, + path: "id", + location: 4..12, + }, + ), + ], + }, + Component { + parts: [ + Text( + "ts", + ), + ], + }, + ], + query: {}, + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + ], + range: Some( + 0..2, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + }, +} diff --git 
a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-3.snap new file mode 100644 index 0000000000..105c2c450d --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql-3.snap @@ -0,0 +1,56 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/circular.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE_QUERY_T_0 @join__graph(name: "one_Query_t_0", url: "none") + ONE_T_TS_0 @join__graph(name: "one_T_ts_0", url: "none") +} + +type T @join__type(graph: ONE_QUERY_T_0) @join__type(graph: ONE_T_TS_0, key: "id") { + id: ID! @join__field(graph: ONE_QUERY_T_0) @join__field(graph: ONE_T_TS_0) + ts: [T] @join__field(graph: ONE_T_TS_0) +} + +type Query @join__type(graph: ONE_QUERY_T_0) @join__type(graph: ONE_T_TS_0) { + t(id: ID): T @join__field(graph: ONE_QUERY_T_0) + _: ID @inaccessible @join__field(graph: ONE_T_TS_0) +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql.snap new file mode 100644 index 0000000000..57a6cbb857 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@circular.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/circular.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Query { + t(id: ID): T +} + +type T { + id: ID! 
+ ts: [T] +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-2.snap new file mode 100644 index 0000000000..edd2b57f5f --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-2.snap @@ -0,0 +1,400 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql +--- +{ + "connectors_Itf_e_0": Connector { + id: ConnectId { + label: "connectors.json http: GET /itfs/{$this.id}/e", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Itf.e), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: Some( + 4001, + ), + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "itfs", + location: 1..5, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 7..15, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "e", + 
location: 17..18, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Empty, + range: Some( + 1..1, + ), + }, + ), + range: Some( + 0..1, + ), + }, + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, + "connectors_Query_itfs_0": Connector { + id: ConnectId { + label: "connectors.json http: GET /itfs", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.itfs), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: Some( + 4001, + ), + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "itfs", + location: 1..5, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "c", + ), + range: Some( + 3..4, + ), + }, + None, + ), + ], + range: Some( + 0..4, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + "connectors_Query_itf_0": Connector { + id: ConnectId { + label: "connectors.json http: GET /itfs/{$args.id}", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: 
ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.itf), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: Some( + 4001, + ), + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "itfs", + location: 1..5, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 7..15, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "c", + ), + range: Some( + 3..4, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "d", + ), + range: Some( + 5..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-3.snap new file mode 100644 index 0000000000..922b9b0d8a --- 
/dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql-3.snap @@ -0,0 +1,77 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_ITF_E_0 @join__graph(name: "connectors_Itf_e_0", url: "none") + CONNECTORS_QUERY_ITF_0 @join__graph(name: "connectors_Query_itf_0", url: "none") + CONNECTORS_QUERY_ITFS_0 @join__graph(name: "connectors_Query_itfs_0", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "none") +} + +interface Itf @join__type(graph: CONNECTORS_ITF_E_0, isInterfaceObject: true, key: "id") @join__type(graph: CONNECTORS_QUERY_ITF_0, isInterfaceObject: true, key: "id") @join__type(graph: CONNECTORS_QUERY_ITFS_0, isInterfaceObject: true, key: "id", resolvable: false) @join__type(graph: GRAPHQL, key: "id") { + e: String @join__field(graph: CONNECTORS_ITF_E_0, type: "String") + id: ID! @join__field(graph: CONNECTORS_ITF_E_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_ITF_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_ITFS_0, type: "ID!") @join__field(graph: GRAPHQL, type: "ID!") + c: Int! @join__field(graph: CONNECTORS_QUERY_ITF_0, type: "Int!") @join__field(graph: CONNECTORS_QUERY_ITFS_0, type: "Int!") + d: Int! 
@join__field(graph: CONNECTORS_QUERY_ITF_0, type: "Int!") +} + +type Query @join__type(graph: CONNECTORS_ITF_E_0) @join__type(graph: CONNECTORS_QUERY_ITF_0) @join__type(graph: CONNECTORS_QUERY_ITFS_0) @join__type(graph: GRAPHQL) { + _: ID @inaccessible @join__field(graph: CONNECTORS_ITF_E_0, type: "ID") + itf(id: ID!): Itf @join__field(graph: CONNECTORS_QUERY_ITF_0, type: "Itf") + itfs: [Itf] @join__field(graph: CONNECTORS_QUERY_ITFS_0, type: "[Itf]") +} + +type T1 implements Itf @join__type(graph: GRAPHQL, key: "id") @join__implements(graph: GRAPHQL, interface: "Itf") { + id: ID! @join__field(graph: GRAPHQL, type: "ID!") + a: String @join__field(graph: GRAPHQL, type: "String") + e: String @join__field + c: Int! @join__field + d: Int! @join__field +} + +type T2 implements Itf @join__type(graph: GRAPHQL, key: "id") @join__implements(graph: GRAPHQL, interface: "Itf") { + id: ID! @join__field(graph: GRAPHQL, type: "ID!") + b: String @join__field(graph: GRAPHQL, type: "String") + e: String @join__field + c: Int! @join__field + d: Int! @join__field +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql.snap new file mode 100644 index 0000000000..1834c8f2e5 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@interface-object.graphql.snap @@ -0,0 +1,34 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/interface-object.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +interface Itf { + id: ID! + c: Int! + d: Int! + e: String +} + +type Query { + itfs: [Itf] + itf(id: ID!): Itf +} + +type T1 implements Itf { + id: ID! + a: String + c: Int! + d: Int! 
+ e: String +} + +type T2 implements Itf { + id: ID! + b: String + c: Int! + d: Int! + e: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-2.snap new file mode 100644 index 0000000000..04f4c88382 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-2.snap @@ -0,0 +1,1405 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql +--- +{ + "one_Query_t_0": Connector { + id: ConnectId { + label: "one. http: GET http://localhost/ts/{$args.id}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.t), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "ts", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 21..29, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + 
selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "unselected", + ), + range: Some( + 7..17, + ), + }, + None, + ), + ], + range: Some( + 0..17, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "one_Query_t2_0": Connector { + id: ConnectId { + label: "one. http: GET http://localhost/ts/{$args.id}?id2={$args.id2}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.t2), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "ts", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 21..29, + }, + ), + ], + }, + ], + query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "id2", + location: 31..34, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + 
expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id2", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 36..45, + }, + ), + ], + }, + ), + ], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "unselected", + ), + range: Some( + 7..17, + ), + }, + None, + ), + ], + range: Some( + 0..17, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "one_Query_unselected_0": Connector { + id: ConnectId { + label: "one. 
http: GET http://localhost/ts/{$args.unselected}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.unselected), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "ts", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "unselected", + ), + range: Some( + 6..16, + ), + }, + WithRange { + node: Empty, + range: Some( + 16..16, + ), + }, + ), + range: Some( + 5..16, + ), + }, + ), + range: Some( + 0..16, + ), + }, + }, + ), + location: 21..37, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "accessibleByUnselected", + ), + range: Some( + 7..29, + ), + }, + None, + ), + ], + range: Some( + 0..29, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "one_T_r1_0": Connector { + id: ConnectId { + label: "one. 
http: GET http://localhost/rs/{$this.id}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r1), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "rs", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 21..29, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, + "one_T_r2_0": Connector { + id: ConnectId { + label: "one. 
http: GET http://localhost/rs/{$this.id}?id2={$this.id2}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r2), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "rs", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 21..29, + }, + ), + ], + }, + ], + query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "id2", + location: 31..34, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id2", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 36..45, + }, + ), + ], + }, + ), + ], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + 
None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, + "one_T_r3_0": Connector { + id: ConnectId { + label: "one. http: GET http://localhost/rs/{$this.id}", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r3), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "rs", + location: 17..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 21..29, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + range: Some( + 3..7, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( 
+ 8..13, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id2", + ), + range: Some( + 14..17, + ), + }, + WithRange { + node: Empty, + range: Some( + 17..17, + ), + }, + ), + range: Some( + 13..17, + ), + }, + ), + range: Some( + 8..17, + ), + }, + }, + }, + ], + range: Some( + 0..17, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: { + $this, + }, + }, + "one_T_r4_0": Connector { + id: ConnectId { + label: "one. http: POST http://localhost/rs", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r4), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "rs", + location: 17..19, + }, + ), + ], + }, + ], + query: [], + }, + method: Post, + headers: {}, + body: Some( + Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + range: Some( + 0..3, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 4..9, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 10..12, + ), + }, + WithRange { + node: Empty, + range: Some( + 12..12, + ), + }, + ), + range: Some( + 9..12, + ), + }, + ), + range: Some( + 4..12, + ), + }, + }, + }, + ], + range: Some( + 0..12, + ), + }, + ), + ), + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + 
node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, + "one_T_r5_0": Connector { + id: ConnectId { + label: "one. http: POST http://localhost/rs", + subgraph_name: "one", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.r5), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "localhost", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "rs", + location: 17..19, + }, + ), + ], + }, + ], + query: [], + }, + method: Post, + headers: {}, + body: Some( + Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + range: Some( + 0..3, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 4..9, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 10..12, + ), + }, + WithRange { + node: Empty, + range: Some( + 12..12, + ), + }, + ), + range: Some( + 9..12, + ), + }, + ), + range: Some( + 4..12, + ), + }, + }, + }, + ], + range: Some( + 0..12, + ), + }, + ), + ), + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Path { + alias: Some( + Alias { + name: 
WithRange { + node: Field( + "id2", + ), + range: Some( + 3..6, + ), + }, + range: Some( + 3..7, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 8..13, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id2", + ), + range: Some( + 14..17, + ), + }, + WithRange { + node: Empty, + range: Some( + 17..17, + ), + }, + ), + range: Some( + 13..17, + ), + }, + ), + range: Some( + 8..17, + ), + }, + }, + }, + ], + range: Some( + 0..17, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: { + $this, + }, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-3.snap new file mode 100644 index 0000000000..fce100380e --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql-3.snap @@ -0,0 +1,76 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + ONE_QUERY_T2_0 @join__graph(name: "one_Query_t2_0", url: "none") + ONE_QUERY_T_0 @join__graph(name: "one_Query_t_0", url: "none") + ONE_QUERY_UNSELECTED_0 @join__graph(name: "one_Query_unselected_0", url: "none") + ONE_T_R1_0 @join__graph(name: "one_T_r1_0", url: "none") + ONE_T_R2_0 @join__graph(name: "one_T_r2_0", url: "none") + ONE_T_R3_0 @join__graph(name: "one_T_r3_0", url: "none") + ONE_T_R4_0 @join__graph(name: "one_T_r4_0", url: "none") + ONE_T_R5_0 @join__graph(name: "one_T_r5_0", url: "none") +} + +type T @join__type(graph: ONE_QUERY_T2_0, key: "id id2") @join__type(graph: ONE_QUERY_T_0, key: "id") @join__type(graph: ONE_QUERY_UNSELECTED_0, key: "unselected") @join__type(graph: ONE_T_R1_0, key: "id") @join__type(graph: ONE_T_R2_0, key: "id id2") @join__type(graph: ONE_T_R3_0, key: "id id2") @join__type(graph: ONE_T_R4_0, key: "id") @join__type(graph: ONE_T_R5_0, key: "id id2") { + id: ID! @join__field(graph: ONE_QUERY_T2_0, type: "ID!") @join__field(graph: ONE_QUERY_T_0, type: "ID!") @join__field(graph: ONE_QUERY_UNSELECTED_0, type: "ID!") @join__field(graph: ONE_T_R1_0, type: "ID!") @join__field(graph: ONE_T_R2_0, type: "ID!") @join__field(graph: ONE_T_R3_0, type: "ID!") @join__field(graph: ONE_T_R4_0, type: "ID!") @join__field(graph: ONE_T_R5_0, type: "ID!") + id2: ID! @join__field(graph: ONE_QUERY_T2_0, type: "ID!") @join__field(graph: ONE_QUERY_T_0, type: "ID!") @join__field(graph: ONE_QUERY_UNSELECTED_0, type: "ID!") @join__field(graph: ONE_T_R2_0, type: "ID!") @join__field(graph: ONE_T_R3_0, type: "ID!") @join__field(graph: ONE_T_R5_0, type: "ID!") + unselected: ID! 
@join__field(graph: ONE_QUERY_T2_0, type: "ID!") @join__field(graph: ONE_QUERY_T_0, type: "ID!") @join__field(graph: ONE_QUERY_UNSELECTED_0, type: "ID!") + accessibleByUnselected: ID! @join__field(graph: ONE_QUERY_UNSELECTED_0, type: "ID!") + r1: R @join__field(graph: ONE_T_R1_0, type: "R") + r2: R @join__field(graph: ONE_T_R2_0, type: "R") + r3: R @join__field(graph: ONE_T_R3_0, type: "R") + r4: R @join__field(graph: ONE_T_R4_0, type: "R") + r5: R @join__field(graph: ONE_T_R5_0, type: "R") +} + +type Query @join__type(graph: ONE_QUERY_T2_0) @join__type(graph: ONE_QUERY_T_0) @join__type(graph: ONE_QUERY_UNSELECTED_0) @join__type(graph: ONE_T_R1_0) @join__type(graph: ONE_T_R2_0) @join__type(graph: ONE_T_R3_0) @join__type(graph: ONE_T_R4_0) @join__type(graph: ONE_T_R5_0) { + t2(id: ID!, id2: ID!): T @join__field(graph: ONE_QUERY_T2_0, type: "T") + t(id: ID!): T @join__field(graph: ONE_QUERY_T_0, type: "T") + unselected(unselected: ID!): T @join__field(graph: ONE_QUERY_UNSELECTED_0, type: "T") + _: ID @inaccessible @join__field(graph: ONE_T_R1_0, type: "ID") @join__field(graph: ONE_T_R2_0, type: "ID") @join__field(graph: ONE_T_R3_0, type: "ID") @join__field(graph: ONE_T_R4_0, type: "ID") @join__field(graph: ONE_T_R5_0, type: "ID") +} + +type R @join__type(graph: ONE_T_R1_0) @join__type(graph: ONE_T_R2_0) @join__type(graph: ONE_T_R3_0) @join__type(graph: ONE_T_R4_0) @join__type(graph: ONE_T_R5_0) { + id: ID! @join__field(graph: ONE_T_R1_0, type: "ID!") @join__field(graph: ONE_T_R2_0, type: "ID!") @join__field(graph: ONE_T_R3_0, type: "ID!") @join__field(graph: ONE_T_R4_0, type: "ID!") @join__field(graph: ONE_T_R5_0, type: "ID!") + id2: ID! 
@join__field(graph: ONE_T_R1_0, type: "ID!") @join__field(graph: ONE_T_R2_0, type: "ID!") @join__field(graph: ONE_T_R3_0, type: "ID!") @join__field(graph: ONE_T_R4_0, type: "ID!") @join__field(graph: ONE_T_R5_0, type: "ID!") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql.snap new file mode 100644 index 0000000000..67300fc0d2 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@keys.graphql.snap @@ -0,0 +1,30 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/keys.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Query { + t(id: ID!): T + t2(id: ID!, id2: ID!): T + " Uses the `unselected` field as a key, but doesn't select it " + unselected(unselected: ID!): T +} + +type R { + id: ID! + id2: ID! +} + +type T { + id: ID! + id2: ID! + unselected: ID! + accessibleByUnselected: ID! 
+ r1: R + r2: R + r3: R + r4: R + r5: R +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-2.snap new file mode 100644 index 0000000000..bd83daf098 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-2.snap @@ -0,0 +1,266 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql +--- +{ + "connectors_Query_foo_0": Connector { + id: ConnectId { + label: "connectors.example http: GET /{$args.bar}/{$args.doubleBaz.buzz}/{$args.baz.quux.quaz}", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.foo), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "bar", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 2..11, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + 
WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "doubleBaz", + ), + range: Some( + 6..15, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "buzz", + ), + range: Some( + 16..20, + ), + }, + WithRange { + node: Empty, + range: Some( + 20..20, + ), + }, + ), + range: Some( + 15..20, + ), + }, + ), + range: Some( + 5..20, + ), + }, + ), + range: Some( + 0..20, + ), + }, + }, + ), + location: 14..34, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "baz", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "quux", + ), + range: Some( + 10..14, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "quaz", + ), + range: Some( + 15..19, + ), + }, + WithRange { + node: Empty, + range: Some( + 19..19, + ), + }, + ), + range: Some( + 14..19, + ), + }, + ), + range: Some( + 9..19, + ), + }, + ), + range: Some( + 5..19, + ), + }, + ), + range: Some( + 0..19, + ), + }, + }, + ), + location: 37..56, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Empty, + range: Some( + 1..1, + ), + }, + ), + range: Some( + 0..1, + ), + }, + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-3.snap new file mode 
100644 index 0000000000..a75f649988 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql-3.snap @@ -0,0 +1,56 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_QUERY_FOO_0 @join__graph(name: "connectors_Query_foo_0", url: "none") +} + +input QuuxInput @join__type(graph: CONNECTORS_QUERY_FOO_0) { + quaz: String @join__field(graph: CONNECTORS_QUERY_FOO_0, type: "String") +} + +input BazInput @join__type(graph: CONNECTORS_QUERY_FOO_0) { + buzz: String @join__field(graph: CONNECTORS_QUERY_FOO_0, type: "String") + quux: QuuxInput @join__field(graph: CONNECTORS_QUERY_FOO_0, type: "QuuxInput") +} + +type Query @join__type(graph: CONNECTORS_QUERY_FOO_0) { + foo(bar: String, baz: BazInput, doubleBaz: BazInput): String @join__field(graph: CONNECTORS_QUERY_FOO_0, type: "String") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql.snap new file mode 100644 index 0000000000..5347838d3f --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@nested_inputs.graphql.snap @@ -0,0 +1,19 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/nested_inputs.graphql +--- +directive @defer(label: String, if: Boolean! 
= true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +input BazInput { + buzz: String + quux: QuuxInput +} + +type Query { + foo(bar: String, baz: BazInput, doubleBaz: BazInput): String +} + +input QuuxInput { + quaz: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-2.snap new file mode 100644 index 0000000000..6318b65e64 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-2.snap @@ -0,0 +1,231 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql +--- +{ + "connectors-subgraph_Query_users_0": Connector { + id: ConnectId { + label: "connectors-subgraph.example http: GET ", + subgraph_name: "connectors-subgraph", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 3..4, + ), + }, + None, + ), + ], + range: Some( + 0..4, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + 
request_variables: {}, + response_variables: {}, + }, + "connectors-subgraph_Query_user_0": Connector { + id: ConnectId { + label: "connectors-subgraph.example http: GET /{$args.id}", + subgraph_name: "connectors-subgraph", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.user), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 2..10, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 3..4, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "b", + ), + range: Some( + 5..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-3.snap 
b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-3.snap new file mode 100644 index 0000000000..79be821174 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql-3.snap @@ -0,0 +1,55 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_SUBGRAPH_QUERY_USER_0 @join__graph(name: "connectors-subgraph_Query_user_0", url: "none") + CONNECTORS_SUBGRAPH_QUERY_USERS_0 @join__graph(name: "connectors-subgraph_Query_users_0", url: "none") +} + +type User @join__type(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0, key: "id") @join__type(graph: CONNECTORS_SUBGRAPH_QUERY_USERS_0) { + a: String @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USERS_0, type: "String") + b: String @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0, type: "String") + id: ID! @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0, type: "ID!") @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USERS_0, type: "ID!") +} + +type Query @join__type(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0) @join__type(graph: CONNECTORS_SUBGRAPH_QUERY_USERS_0) { + user(id: ID!): User @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USER_0, type: "User") + users: [User] @join__field(graph: CONNECTORS_SUBGRAPH_QUERY_USERS_0, type: "[User]") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql.snap new file mode 100644 index 0000000000..3dc9a09bc5 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@normalize_names.graphql.snap @@ -0,0 +1,17 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/normalize_names.graphql +--- +directive @defer(label: String, if: Boolean! 
= true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Query { + users: [User] + user(id: ID!): User +} + +type User { + id: ID! + a: String + b: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-2.snap new file mode 100644 index 0000000000..91d300e6d0 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-2.snap @@ -0,0 +1,1043 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql +--- +{ + "connectors_Mutation_createUser_0": Connector { + id: ConnectId { + label: "connectors.example http: POST /create/user", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Mutation.createUser), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "create", + location: 1..7, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "user", + location: 8..12, + }, + ), + ], + }, + ], + query: [], + }, + method: Post, + headers: {}, + body: Some( + Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "input", + ), + range: Some( + 6..11, + ), + }, + WithRange { + node: 
Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 14..18, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "username", + ), + range: Some( + 19..27, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "email", + ), + range: Some( + 28..33, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "status", + ), + range: Some( + 34..40, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "address", + ), + range: Some( + 41..48, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "street", + ), + range: Some( + 51..57, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "suite", + ), + range: Some( + 58..63, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "city", + ), + range: Some( + 64..68, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "zipcode", + ), + range: Some( + 69..76, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "geo", + ), + range: Some( + 77..80, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "lat", + ), + range: Some( + 83..86, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "lng", + ), + range: Some( + 87..90, + ), + }, + None, + ), + ], + range: Some( + 81..92, + ), + }, + ), + ), + ], + range: Some( + 49..94, + ), + }, + ), + ), + ], + range: Some( + 12..96, + ), + }, + ), + range: Some( + 12..96, + ), + }, + ), + range: Some( + 5..96, + ), + }, + ), + range: Some( + 0..96, + ), + }, + }, + ), + ), + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + ], + range: Some( + 0..2, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: { 
+ $args, + }, + response_variables: {}, + }, + "connectors_Query_filterUsersByEmailDomain_0": Connector { + id: ConnectId { + label: "connectors.example http: GET /filter/users", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.filterUsersByEmailDomain), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "filter", + location: 1..7, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 8..13, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: Some( + Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "emailDomain", + ), + range: Some( + 0..11, + ), + }, + range: Some( + 0..12, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 13..18, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "email", + ), + range: Some( + 19..24, + ), + }, + WithRange { + node: Empty, + range: Some( + 24..24, + ), + }, + ), + range: Some( + 18..24, + ), + }, + ), + range: Some( + 13..24, + ), + }, + }, + }, + ], + range: Some( + 0..24, + ), + }, + ), + ), + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + ], + range: Some( + 0..7, + ), + }, + ), + config: 
None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "connectors_Query_usersByCompany_0": Connector { + id: ConnectId { + label: "connectors.example http: GET /by-company/{$args.company.name}", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.usersByCompany), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "by-company", + location: 1..11, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "company", + ), + range: Some( + 6..13, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "name", + ), + range: Some( + 14..18, + ), + }, + WithRange { + node: Empty, + range: Some( + 18..18, + ), + }, + ), + range: Some( + 13..18, + ), + }, + ), + range: Some( + 5..18, + ), + }, + ), + range: Some( + 0..18, + ), + }, + }, + ), + location: 13..31, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "company", + ), + range: Some( + 8..15, + 
), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 20..24, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "catchPhrase", + ), + range: Some( + 27..38, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "bs", + ), + range: Some( + 41..43, + ), + }, + None, + ), + ], + range: Some( + 16..45, + ), + }, + ), + ), + ], + range: Some( + 0..45, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "connectors_Query_user_0": Connector { + id: ConnectId { + label: "connectors.example http: GET /{$args.id}", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.user), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 2..10, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + 
WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "username", + ), + range: Some( + 8..16, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "email", + ), + range: Some( + 17..22, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "address", + ), + range: Some( + 23..30, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "street", + ), + range: Some( + 35..41, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "suite", + ), + range: Some( + 44..49, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "city", + ), + range: Some( + 52..56, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "zipcode", + ), + range: Some( + 59..66, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "geo", + ), + range: Some( + 69..72, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "lat", + ), + range: Some( + 79..82, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "lng", + ), + range: Some( + 87..90, + ), + }, + None, + ), + ], + range: Some( + 73..94, + ), + }, + ), + ), + ], + range: Some( + 31..96, + ), + }, + ), + ), + Field( + None, + WithRange { + node: Field( + "phone", + ), + range: Some( + 97..102, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "website", + ), + range: Some( + 103..110, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "company", + ), + range: Some( + 111..118, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 123..127, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "catchPhrase", + ), + range: Some( + 130..141, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "bs", + ), 
+ range: Some( + 144..146, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "email", + ), + range: Some( + 149..154, + ), + }, + None, + ), + ], + range: Some( + 119..156, + ), + }, + ), + ), + ], + range: Some( + 0..156, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-3.snap new file mode 100644 index 0000000000..cbf876497a --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql-3.snap @@ -0,0 +1,124 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query + mutation: Mutation +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. 
+ """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_MUTATION_CREATEUSER_0 @join__graph(name: "connectors_Mutation_createUser_0", url: "none") + CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0 @join__graph(name: "connectors_Query_filterUsersByEmailDomain_0", url: "none") + CONNECTORS_QUERY_USER_0 @join__graph(name: "connectors_Query_user_0", url: "none") + CONNECTORS_QUERY_USERSBYCOMPANY_0 @join__graph(name: "connectors_Query_usersByCompany_0", url: "none") +} + +scalar EmailAddress @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) @join__type(graph: CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0) @join__type(graph: CONNECTORS_QUERY_USER_0) + +enum Status @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) { + ACTIVE @join__enumValue(graph: CONNECTORS_MUTATION_CREATEUSER_0) + INACTIVE @join__enumValue(graph: CONNECTORS_MUTATION_CREATEUSER_0) +} + +input AddressGeoInput @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) { + lat: Float @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "Float") + lng: Float @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "Float") +} + +input AddressInput @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) { + street: String @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String") + suite: String @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String") + city: String @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String") + zipcode: String @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String") + geo: AddressGeoInput @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "AddressGeoInput") +} + +input CreateUserInput @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) { + name: String! @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String!") + username: String! 
@join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "String!") + email: EmailAddress! @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "EmailAddress!") + status: Status! @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "Status!") + address: AddressInput @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "AddressInput") +} + +type User @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) @join__type(graph: CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0) @join__type(graph: CONNECTORS_QUERY_USER_0, key: "id") @join__type(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0) { + id: ID! @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_USER_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "ID!") + name: String @join__field(graph: CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String") + address: Address @join__field(graph: CONNECTORS_QUERY_USER_0, type: "Address") + company: CompanyInfo @join__field(graph: CONNECTORS_QUERY_USER_0, type: "CompanyInfo") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "CompanyInfo") + email: EmailAddress @join__field(graph: CONNECTORS_QUERY_USER_0, type: "EmailAddress") + phone: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + username: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + website: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") +} + +type Mutation @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) { + createUser(input: CreateUserInput!): User @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "User") +} + +type Query @join__type(graph: CONNECTORS_MUTATION_CREATEUSER_0) @join__type(graph: 
CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0) @join__type(graph: CONNECTORS_QUERY_USER_0) @join__type(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0) { + _: ID @inaccessible @join__field(graph: CONNECTORS_MUTATION_CREATEUSER_0, type: "ID") + filterUsersByEmailDomain(email: EmailAddress!): [User] @join__field(graph: CONNECTORS_QUERY_FILTERUSERSBYEMAILDOMAIN_0, type: "[User]") + user(id: ID!): User @join__field(graph: CONNECTORS_QUERY_USER_0, type: "User") + usersByCompany(company: CompanyInput!): [User] @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "[User]") +} + +type AddressGeo @join__type(graph: CONNECTORS_QUERY_USER_0) { + lat: Float @join__field(graph: CONNECTORS_QUERY_USER_0, type: "Float") + lng: Float @join__field(graph: CONNECTORS_QUERY_USER_0, type: "Float") +} + +type Address @join__type(graph: CONNECTORS_QUERY_USER_0) { + city: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + geo: AddressGeo @join__field(graph: CONNECTORS_QUERY_USER_0, type: "AddressGeo") + street: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + suite: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + zipcode: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") +} + +type CompanyInfo @join__type(graph: CONNECTORS_QUERY_USER_0) @join__type(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0) { + bs: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String") + catchPhrase: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String") + email: EmailAddress @join__field(graph: CONNECTORS_QUERY_USER_0, type: "EmailAddress") + name: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String") +} + +input CompanyInput @join__type(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0) { + name: String! 
@join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String!") + catchPhrase: String @join__field(graph: CONNECTORS_QUERY_USERSBYCOMPANY_0, type: "String") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql.snap new file mode 100644 index 0000000000..137fc3f565 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@realistic.graphql.snap @@ -0,0 +1,80 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/realistic.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Address { + street: String + suite: String + city: String + zipcode: String + geo: AddressGeo +} + +type AddressGeo { + lat: Float + lng: Float +} + +input AddressGeoInput { + lat: Float + lng: Float +} + +input AddressInput { + street: String + suite: String + city: String + zipcode: String + geo: AddressGeoInput +} + +type CompanyInfo { + name: String + catchPhrase: String + bs: String + email: EmailAddress +} + +input CompanyInput { + name: String! + catchPhrase: String +} + +input CreateUserInput { + name: String! + username: String! + email: EmailAddress! + status: Status! + address: AddressInput +} + +scalar EmailAddress + +type Mutation { + createUser(input: CreateUserInput!): User +} + +type Query { + filterUsersByEmailDomain(email: EmailAddress!): [User] + usersByCompany(company: CompanyInput!): [User] + user(id: ID!): User +} + +enum Status { + ACTIVE + INACTIVE +} + +type User { + id: ID! 
+ name: String + username: String + email: EmailAddress + address: Address + phone: String + website: String + company: CompanyInfo +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-2.snap new file mode 100644 index 0000000000..8fc72357a1 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-2.snap @@ -0,0 +1,247 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql +--- +{ + "connectors_Query_f_0": Connector { + id: ConnectId { + label: "connectors. http: GET https://my.api/t", + subgraph_name: "connectors", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.f), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "my.api", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "t", + location: 15..16, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "k", + ), + range: Some( + 0..1, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 4..6, + ), + }, + None, + ), + ], + range: Some( + 2..8, + ), + }, + ), + ), + ], + range: Some( + 0..8, + ), + }, + ), + config: None, + 
max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + "connectors_T_b_0": Connector { + id: ConnectId { + label: "connectors. http: GET https://my.api/t/{$this.k.id}", + subgraph_name: "connectors", + source_name: None, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(T.b), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: None, + connect_template: URLTemplate { + base: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "my.api", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "t", + location: 15..16, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "k", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 8..10, + ), + }, + WithRange { + node: Empty, + range: Some( + 10..10, + ), + }, + ), + range: Some( + 7..10, + ), + }, + ), + range: Some( + 5..10, + ), + }, + ), + range: Some( + 0..10, + ), + }, + }, + ), + location: 18..28, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "b", + ), + range: Some( + 0..1, + ), + }, + None, + ), + ], + range: Some( + 0..1, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, +} diff --git 
a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-3.snap new file mode 100644 index 0000000000..3d35dbd762 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql-3.snap @@ -0,0 +1,60 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_QUERY_F_0 @join__graph(name: "connectors_Query_f_0", url: "none") + CONNECTORS_T_B_0 @join__graph(name: "connectors_T_b_0", url: "none") +} + +type K @join__type(graph: CONNECTORS_QUERY_F_0) @join__type(graph: CONNECTORS_T_B_0) { + id: ID! @join__field(graph: CONNECTORS_QUERY_F_0, type: "ID!") @join__field(graph: CONNECTORS_T_B_0, type: "ID!") +} + +type T @join__type(graph: CONNECTORS_QUERY_F_0) @join__type(graph: CONNECTORS_T_B_0, key: "k { id }") { + k: K @join__field(graph: CONNECTORS_QUERY_F_0, type: "K") @join__field(graph: CONNECTORS_T_B_0, type: "K") + b: String @join__field(graph: CONNECTORS_T_B_0, type: "String") +} + +type Query @join__type(graph: CONNECTORS_QUERY_F_0) @join__type(graph: CONNECTORS_T_B_0) { + f: T @join__field(graph: CONNECTORS_QUERY_F_0, type: "T") + _: ID @inaccessible @join__field(graph: CONNECTORS_T_B_0, type: "ID") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql.snap new file mode 100644 index 0000000000..845b9f83e8 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@sibling_fields.graphql.snap @@ -0,0 +1,19 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: 
apollo-federation/src/sources/connect/expand/tests/schemas/expand/sibling_fields.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type K { + id: ID! +} + +type Query { + f: T +} + +type T { + k: K + b: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-2.snap new file mode 100644 index 0000000000..0323ff7192 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-2.snap @@ -0,0 +1,427 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql +--- +{ + "connectors_Query_users_0": Connector { + id: ConnectId { + label: "connectors.example http: GET ", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 3..4, + ), + }, + None, + ), + ], + range: Some( + 0..4, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + 
request_variables: {}, + response_variables: {}, + }, + "connectors_Query_user_0": Connector { + id: ConnectId { + label: "connectors.example http: GET /{$args.id}", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.user), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 2..10, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 3..4, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "b", + ), + range: Some( + 5..6, + ), + }, + None, + ), + ], + range: Some( + 0..6, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "connectors_User_d_1": Connector { + id: ConnectId { + label: "connectors.example http: GET /{$this.c}/d", + subgraph_name: "connectors", + source_name: Some( + "example", + ), 
+ directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(User.d), + directive_name: "connect", + directive_index: 1, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "c", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Empty, + range: Some( + 7..7, + ), + }, + ), + range: Some( + 5..7, + ), + }, + ), + range: Some( + 0..7, + ), + }, + }, + ), + location: 2..9, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "d", + location: 11..12, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: Some( + Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "with_b", + ), + range: Some( + 0..6, + ), + }, + range: Some( + 0..7, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 8..13, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "b", + ), + range: Some( + 14..15, + ), + }, + WithRange { + node: Empty, + range: Some( + 15..15, + ), + }, + ), + range: Some( + 13..15, + ), + }, + ), + range: Some( + 8..15, + ), + }, + }, + }, + ], + range: Some( + 0..15, + ), + }, + ), + ), + }, + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Empty, + range: Some( + 1..1, + ), + }, + ), + range: Some( + 0..1, + ), + }, + }, + ), + 
config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-3.snap new file mode 100644 index 0000000000..47723ab672 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql-3.snap @@ -0,0 +1,62 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_QUERY_USER_0 @join__graph(name: "connectors_Query_user_0", url: "none") + CONNECTORS_QUERY_USERS_0 @join__graph(name: "connectors_Query_users_0", url: "none") + CONNECTORS_USER_D_1 @join__graph(name: "connectors_User_d_1", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://graphql") +} + +type User @join__type(graph: CONNECTORS_QUERY_USER_0, key: "id") @join__type(graph: CONNECTORS_QUERY_USERS_0) @join__type(graph: CONNECTORS_USER_D_1, key: "c b") @join__type(graph: GRAPHQL, key: "id") { + a: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "String") + b: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_USER_D_1, type: "String") + id: ID! 
@join__field(graph: CONNECTORS_QUERY_USER_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "ID!") @join__field(graph: GRAPHQL, type: "ID!") + d: String @join__field(graph: CONNECTORS_USER_D_1, type: "String") + c: String @join__field(graph: CONNECTORS_USER_D_1, type: "String") @join__field(graph: GRAPHQL, type: "String") +} + +type Query @join__type(graph: CONNECTORS_QUERY_USER_0) @join__type(graph: CONNECTORS_QUERY_USERS_0) @join__type(graph: CONNECTORS_USER_D_1) @join__type(graph: GRAPHQL) { + user(id: ID!): User @join__field(graph: CONNECTORS_QUERY_USER_0, type: "User") + users: [User] @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "[User]") + _: ID @inaccessible @join__field(graph: CONNECTORS_USER_D_1, type: "ID") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql.snap new file mode 100644 index 0000000000..7b27ff4c09 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@simple.graphql.snap @@ -0,0 +1,19 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/simple.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type Query { + users: [User] + user(id: ID!): User +} + +type User { + id: ID! 
+ a: String + b: String + c: String + d: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-2.snap new file mode 100644 index 0000000000..be3b10be8b --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-2.snap @@ -0,0 +1,399 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql +--- +{ + "connectors_Query_users_0": Connector { + id: ConnectId { + label: "connectors.json http: GET /users", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + ], + range: Some( + 0..7, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + 
"connectors_Query_user_0": Connector { + id: ConnectId { + label: "connectors.json http: GET /users/{$args.id}", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.user), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 8..16, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "username", + ), + range: Some( + 8..16, + ), + }, + None, + ), + ], + range: Some( + 0..16, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Explicit, + ), + spec: V0_1, + request_variables: { + $args, + }, + response_variables: {}, + }, + "connectors_User_d_1": Connector { + id: ConnectId { + label: "connectors.json http: GET 
/users/{$this.c}", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(User.d), + directive_name: "connect", + directive_index: 1, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "c", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Empty, + range: Some( + 7..7, + ), + }, + ), + range: Some( + 5..7, + ), + }, + ), + range: Some( + 0..7, + ), + }, + }, + ), + location: 8..15, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "phone", + ), + range: Some( + 2..7, + ), + }, + WithRange { + node: Empty, + range: Some( + 7..7, + ), + }, + ), + range: Some( + 1..7, + ), + }, + ), + range: Some( + 0..7, + ), + }, + }, + ), + config: None, + max_requests: None, + entity_resolver: Some( + Implicit, + ), + spec: V0_1, + request_variables: { + $this, + }, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-3.snap 
b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-3.snap new file mode 100644 index 0000000000..8cb27f68d0 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql-3.snap @@ -0,0 +1,62 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @link(url: "https://specs.apollo.dev/inaccessible/v0.2", for: SECURITY) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) 
repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @inaccessible on FIELD_DEFINITION | OBJECT | INTERFACE | UNION | ARGUMENT_DEFINITION | SCALAR | ENUM | ENUM_VALUE | INPUT_OBJECT | INPUT_FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_QUERY_USER_0 @join__graph(name: "connectors_Query_user_0", url: "none") + CONNECTORS_QUERY_USERS_0 @join__graph(name: "connectors_Query_users_0", url: "none") + CONNECTORS_USER_D_1 @join__graph(name: "connectors_User_d_1", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://localhost:4001") +} + +type User @join__type(graph: CONNECTORS_QUERY_USER_0, key: "id") @join__type(graph: CONNECTORS_QUERY_USERS_0) @join__type(graph: CONNECTORS_USER_D_1, key: "c") @join__type(graph: GRAPHQL, key: "id") { + id: ID! 
@join__field(graph: CONNECTORS_QUERY_USER_0, type: "ID!") @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "ID!") @join__field(graph: GRAPHQL, type: "ID!") + name: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "String") + username: String @join__field(graph: CONNECTORS_QUERY_USER_0, type: "String") + d: String @join__field(graph: CONNECTORS_USER_D_1, type: "String") + c: String @join__field(graph: CONNECTORS_USER_D_1, type: "String") @join__field(graph: GRAPHQL, type: "String") +} + +type Query @join__type(graph: CONNECTORS_QUERY_USER_0) @join__type(graph: CONNECTORS_QUERY_USERS_0) @join__type(graph: CONNECTORS_USER_D_1) @join__type(graph: GRAPHQL) { + user(id: ID!): User @join__field(graph: CONNECTORS_QUERY_USER_0, type: "User") + users: [User] @join__field(graph: CONNECTORS_QUERY_USERS_0, type: "[User]") + _: ID @inaccessible @join__field(graph: CONNECTORS_USER_D_1, type: "ID") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql.snap new file mode 100644 index 0000000000..830dd64961 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@steelthread.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/steelthread.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +scalar JSON + +type Query { + users: [User] + user(id: ID!): User +} + +type User { + id: ID! 
+ name: String + username: String + c: String + d: String +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-2.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-2.snap new file mode 100644 index 0000000000..7514920800 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-2.snap @@ -0,0 +1,147 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: connectors.by_service_name +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql +--- +{ + "connectors_Query_ts_0": Connector { + id: ConnectId { + label: "connectors.example http: GET ", + subgraph_name: "connectors", + source_name: Some( + "example", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.ts), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [], + query: [], + }, + method: Get, + headers: {}, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 0..1, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 4..6, + ), + }, + None, + ), + ], + range: Some( + 2..8, + ), + }, + ), + ), + Field( + None, + WithRange { + node: Field( + "b", + ), + range: Some( + 9..10, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 13..14, + ), + }, + Some( + 
SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 17..19, + ), + }, + None, + ), + ], + range: Some( + 15..21, + ), + }, + ), + ), + ], + range: Some( + 11..23, + ), + }, + ), + ), + ], + range: Some( + 0..23, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-3.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-3.snap new file mode 100644 index 0000000000..b679b918c4 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql-3.snap @@ -0,0 +1,60 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: raw_sdl +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql +--- +schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/join/v0.3", for: EXECUTION) @join__directive(graphs: [], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1"}) { + query: Query +} + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on ENUM | INPUT_OBJECT | INTERFACE | OBJECT | SCALAR | UNION + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on INTERFACE | OBJECT + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +enum link__Purpose { + """ + SECURITY features provide metadata necessary to securely resolve fields. + """ + SECURITY + """EXECUTION features provide metadata necessary for operation execution.""" + EXECUTION +} + +scalar link__Import + +scalar join__FieldSet + +scalar join__DirectiveArguments + +enum join__Graph { + CONNECTORS_QUERY_TS_0 @join__graph(name: "connectors_Query_ts_0", url: "none") +} + +type A @join__type(graph: CONNECTORS_QUERY_TS_0) { + id: ID @join__field(graph: CONNECTORS_QUERY_TS_0, type: "ID") +} + +type B @join__type(graph: CONNECTORS_QUERY_TS_0) { + a: A @join__field(graph: CONNECTORS_QUERY_TS_0, type: "A") +} + +type T @join__type(graph: CONNECTORS_QUERY_TS_0) { + a: A @join__field(graph: CONNECTORS_QUERY_TS_0, type: "A") + b: B @join__field(graph: CONNECTORS_QUERY_TS_0, type: "B") +} + +type Query @join__type(graph: CONNECTORS_QUERY_TS_0) { + ts: [T] @join__field(graph: CONNECTORS_QUERY_TS_0, type: "[T]") +} diff --git a/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql.snap b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql.snap new file mode 100644 index 
0000000000..430f847d54 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/tests/snapshots/it_expand_supergraph@types_used_twice.graphql.snap @@ -0,0 +1,23 @@ +--- +source: apollo-federation/src/sources/connect/expand/tests/mod.rs +expression: api_schema +input_file: apollo-federation/src/sources/connect/expand/tests/schemas/expand/types_used_twice.graphql +--- +directive @defer(label: String, if: Boolean! = true) on FRAGMENT_SPREAD | INLINE_FRAGMENT + +type A { + id: ID +} + +type B { + a: A +} + +type Query { + ts: [T] +} + +type T { + a: A + b: B +} diff --git a/apollo-federation/src/sources/connect/expand/visitors/input.rs b/apollo-federation/src/sources/connect/expand/visitors/input.rs new file mode 100644 index 0000000000..80eb9937d2 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/visitors/input.rs @@ -0,0 +1,132 @@ +use std::ops::Deref; + +use apollo_compiler::ast::InputValueDefinition; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::InputObjectType; +use apollo_compiler::Node; +use indexmap::IndexMap; + +use super::filter_directives; +use super::try_insert; +use super::try_pre_insert; +use super::FieldVisitor; +use super::GroupVisitor; +use super::SchemaVisitor; +use crate::error::FederationError; +use crate::schema::position::InputObjectFieldDefinitionPosition; +use crate::schema::position::InputObjectTypeDefinitionPosition; +use crate::schema::position::TypeDefinitionPosition; + +impl FieldVisitor + for SchemaVisitor<'_, InputObjectTypeDefinitionPosition, InputObjectType> +{ + type Error = FederationError; + + fn visit<'a>(&mut self, field: InputObjectFieldDefinitionPosition) -> Result<(), Self::Error> { + let (_, r#type) = self.type_stack.last_mut().ok_or(FederationError::internal( + "tried to visit a field in a group not yet visited", + ))?; + + // Extract the node info + let field_def = field.get(self.original_schema.schema())?; + + // Add the input to the currently processing object, making sure to 
not overwrite if it already + // exists (and verify that we didn't change the type) + let new_field = InputValueDefinition { + description: field_def.description.clone(), + name: field_def.name.clone(), + default_value: field_def.default_value.clone(), + ty: field_def.ty.clone(), + directives: filter_directives(self.directive_deny_list, &field_def.directives), + }; + + let input_type = self + .original_schema + .get_type(field_def.ty.inner_named_type().clone())?; + match input_type { + TypeDefinitionPosition::Scalar(pos) => { + try_pre_insert!(self.to_schema, pos)?; + try_insert!( + self.to_schema, + pos, + pos.get(self.original_schema.schema())?.clone() + )?; + } + TypeDefinitionPosition::Enum(pos) => { + try_pre_insert!(self.to_schema, pos)?; + try_insert!( + self.to_schema, + pos, + pos.get(self.original_schema.schema())?.clone() + )?; + } + _ => {} + } + + if let Some(old_field) = r#type.fields.get(&field.field_name) { + if *old_field.deref().deref() != new_field { + return Err(FederationError::internal( + format!( "tried to write field to existing type, but field type was different. 
expected {new_field:?} found {old_field:?}"), + )); + } + } else { + r#type + .fields + .insert(field.field_name, Component::new(new_field)); + } + + Ok(()) + } +} + +impl GroupVisitor + for SchemaVisitor<'_, InputObjectTypeDefinitionPosition, InputObjectType> +{ + fn try_get_group_for_field( + &self, + field: &InputObjectFieldDefinitionPosition, + ) -> Result, FederationError> { + // Return the next group, if found + let field_type = field.get(self.original_schema.schema())?; + let inner_type = self + .original_schema + .get_type(field_type.ty.inner_named_type().clone())?; + match inner_type { + TypeDefinitionPosition::InputObject(input) => Ok(Some(input)), + TypeDefinitionPosition::Scalar(_) | TypeDefinitionPosition::Enum(_) => Ok(None), + + other => Err(FederationError::internal(format!( + "input objects cannot include fields of type: {}", + other.type_name() + ))), + } + } + + fn enter_group<'a>( + &mut self, + group: &InputObjectTypeDefinitionPosition, + ) -> Result, FederationError> { + try_pre_insert!(self.to_schema, group)?; + + let group_def = group.get(self.original_schema.schema())?; + let output_type = InputObjectType { + description: group_def.description.clone(), + name: group_def.name.clone(), + directives: filter_directives(self.directive_deny_list, &group_def.directives), + fields: IndexMap::with_hasher(Default::default()), // Filled in by the rest of the visitor + }; + + self.type_stack.push((group.clone(), output_type)); + let def = group.get(self.original_schema.schema())?; + Ok(def.fields.keys().cloned().map(|f| group.field(f)).collect()) + } + + fn exit_group(&mut self) -> Result<(), FederationError> { + let (definition, r#type) = self.type_stack.pop().ok_or(FederationError::internal( + "tried to exit a group not yet visited", + ))?; + + // Now actually consolidate the object into our schema + try_insert!(self.to_schema, definition, Node::new(r#type)) + } +} diff --git a/apollo-federation/src/sources/connect/expand/visitors/mod.rs 
b/apollo-federation/src/sources/connect/expand/visitors/mod.rs new file mode 100644 index 0000000000..9420d56456 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/visitors/mod.rs @@ -0,0 +1,424 @@ +//! Expansion Visitors +//! +//! This module contains various helper visitors for traversing nested structures, +//! adding needed types to a mutable schema. + +pub(crate) mod input; +mod selection; + +use std::collections::VecDeque; + +use apollo_compiler::ast::Directive; +use apollo_compiler::Name; +use indexmap::IndexSet; + +use crate::schema::FederationSchema; +use crate::schema::ValidFederationSchema; + +/// Filter out directives from a directive list +pub(crate) fn filter_directives<'a, D, I, O>(deny_list: &IndexSet, directives: D) -> O +where + D: IntoIterator, + I: 'a + AsRef + Clone, + O: FromIterator, +{ + directives + .into_iter() + .filter(|d| !deny_list.contains(&d.as_ref().name)) + .cloned() + .collect() +} + +/// Try to pre-insert into a schema, ignoring the operation if the type already exists +/// and matches the existing type +macro_rules! try_pre_insert { + ($schema:expr, $pos:expr) => {{ + if let Some(old_pos) = $schema.try_get_type($pos.type_name.clone()) { + // Verify that the types match + let pos = $crate::schema::position::TypeDefinitionPosition::from($pos.clone()); + if old_pos != pos { + Err($crate::FederationError::internal(format!( + "found different type when upserting: expected {:?} found {:?}", + pos, old_pos + ))) + } else { + Ok(()) + } + } else { + $pos.pre_insert($schema) + } + }}; +} + +/// Try to insert into a schema, ignoring the operation if the type already exists +/// and matches the existing type +macro_rules! 
try_insert { + ($schema:expr, $pos:expr, $def:expr) => {{ + if let Some(old_pos) = $schema.try_get_type($pos.type_name.clone()) { + // Verify that the types match + let pos = $crate::schema::position::TypeDefinitionPosition::from($pos.clone()); + if old_pos != pos { + Err($crate::FederationError::internal(format!( + "found different type when upserting: expected {:?} found {:?}", + pos, old_pos + ))) + } else { + Ok(()) + } + } else { + $pos.insert($schema, $def) + } + }}; +} +pub(crate) use try_insert; +pub(crate) use try_pre_insert; + +/// Visitor for arbitrary field types. +/// +/// Any type of interest that should be viewed when traversing the tree-like structure +/// defined by [GroupVisitor] should implement this trait. +pub(crate) trait FieldVisitor: Sized { + type Error; + + /// Visit a field + fn visit(&mut self, field: Field) -> Result<(), Self::Error>; +} + +/// Visitor for arbitrary tree-like structures where nodes can also have children +/// +/// This trait treats all nodes in the graph as Fields, checking if a Field is also +/// a group for handling children. Visiting order is depth-first. +pub(crate) trait GroupVisitor +where + Self: FieldVisitor, + Field: Clone, +{ + /// Try to get a group from a field, returning None if the field is not a group + fn try_get_group_for_field( + &self, + field: &Field, + ) -> Result, >::Error>; + + /// Enter a subselection group + /// Note: You can assume that the field corresponding to this + /// group will be visited first. + fn enter_group( + &mut self, + group: &Group, + ) -> Result, >::Error>; + + /// Exit a subselection group + /// Note: You can assume that the named selection corresponding to this + /// group will be visited and entered first. + fn exit_group(&mut self) -> Result<(), >::Error>; + + /// Walk through the `Group`, visiting each output key. If at any point, one of the + /// visitor methods returns an error, then the walk will be stopped and the error will be + /// returned. 
+ fn walk(mut self, entry: Group) -> Result<(), >::Error> { + // Start visiting each of the fields + let mut to_visit = + VecDeque::from_iter(self.enter_group(&entry)?.into_iter().map(|n| (0i32, n))); + let mut current_depth = 0; + while let Some((depth, next)) = to_visit.pop_front() { + for _ in depth..current_depth { + self.exit_group()?; + } + current_depth = depth; + + self.visit(next.clone())?; + + // If we have a named selection that has a subselection, then we want to + // make sure that we visit the children before all other siblings. + // + // Note: We reverse here since we always push to the front. + if let Some(group) = self.try_get_group_for_field(&next)? { + current_depth += 1; + + let fields = self.enter_group(&group)?; + fields + .into_iter() + .rev() + .for_each(|s| to_visit.push_front((current_depth, s))); + } + } + + // Make sure that we exit until we are no longer nested + for _ in 0..=current_depth { + self.exit_group()?; + } + + Ok(()) + } +} + +/// A visitor for schema building. +/// +/// This implementation of the JSONSelection visitor walks a JSONSelection, +/// copying over all output types (and respective fields / sub types) as it goes +/// from a reference schema. +pub(crate) struct SchemaVisitor<'a, Group, GroupType> { + /// List of directives to not copy over into the target schema. + directive_deny_list: &'a IndexSet, + + /// The original schema used for sourcing all types / fields / directives / etc. + original_schema: &'a ValidFederationSchema, + + /// The target schema for adding all types. + to_schema: &'a mut FederationSchema, + + /// A stack of parent types used for fetching subtypes + /// + /// Each entry corresponds to a nested subselect in the JSONSelection. 
+ type_stack: Vec<(Group, GroupType)>, +} + +impl<'a, Group, GroupType> SchemaVisitor<'a, Group, GroupType> { + pub(crate) fn new( + original_schema: &'a ValidFederationSchema, + to_schema: &'a mut FederationSchema, + directive_deny_list: &'a IndexSet, + ) -> SchemaVisitor<'a, Group, GroupType> { + SchemaVisitor { + directive_deny_list, + original_schema, + to_schema, + type_stack: Vec::new(), + } + } +} + +#[cfg(test)] +mod tests { + use insta::assert_snapshot; + use itertools::Itertools; + + use crate::error::FederationError; + use crate::sources::connect::expand::visitors::FieldVisitor; + use crate::sources::connect::expand::visitors::GroupVisitor; + use crate::sources::connect::json_selection::NamedSelection; + use crate::sources::connect::JSONSelection; + use crate::sources::connect::SubSelection; + + /// Visitor for tests. + /// + /// Each node visited is added, along with its depth. This is later printed + /// such that groups are indented based on depth. + struct TestVisitor<'a> { + depth_stack: Vec, + visited: &'a mut Vec<(usize, String)>, + } + + impl<'a> TestVisitor<'a> { + fn new(visited: &'a mut Vec<(usize, String)>) -> Self { + Self { + depth_stack: Vec::new(), + visited, + } + } + + fn last_depth(&self) -> Option { + self.depth_stack.last().copied() + } + } + + fn print_visited(visited: Vec<(usize, String)>) -> String { + let mut result = String::new(); + for (depth, visited) in visited { + result.push_str(&format!("{}{visited}\n", "| ".repeat(depth))); + } + + result + } + + impl FieldVisitor for TestVisitor<'_> { + type Error = FederationError; + + fn visit<'a>(&mut self, field: NamedSelection) -> Result<(), Self::Error> { + for name in field.names() { + self.visited + .push((self.last_depth().unwrap_or_default(), name.to_string())); + } + + Ok(()) + } + } + + impl GroupVisitor for TestVisitor<'_> { + fn try_get_group_for_field( + &self, + field: &NamedSelection, + ) -> Result, FederationError> { + Ok(field.next_subselection().cloned()) + } + + fn 
enter_group( + &mut self, + group: &SubSelection, + ) -> Result, FederationError> { + let next_depth = self.last_depth().map(|d| d + 1).unwrap_or(0); + self.depth_stack.push(next_depth); + Ok(group + .selections_iter() + .sorted_by_key(|s| s.names()) + .cloned() + .collect()) + } + + fn exit_group(&mut self) -> Result<(), FederationError> { + self.depth_stack.pop().unwrap(); + Ok(()) + } + } + + #[test] + fn it_iterates_over_empty_path() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = JSONSelection::parse("").unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @""); + } + + #[test] + fn it_iterates_over_simple_selection() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = JSONSelection::parse("a b c d").unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @r###" + a + b + c + d + "###); + } + + #[test] + fn it_iterates_over_aliased_selection() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = JSONSelection::parse("a: one b: two c: three d: four").unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @r###" + a + b + c + d + "###); + } + + #[test] + fn it_iterates_over_nested_selection() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = JSONSelection::parse("a { b { c { d { e } } } } f").unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @r###" + a + | b + | | c + | | | d + | | | | e + f + "###); + } + + #[test] + fn it_iterates_over_paths() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = 
JSONSelection::parse( + "a + $.b { + c + $.d { + e + f: g.h { i } + } + } + j", + ) + .unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @r###" + a + c + e + f + | i + j + "###); + } + + #[test] + fn it_iterates_over_complex_selection() { + let mut visited = Vec::new(); + let visitor = TestVisitor::new(&mut visited); + let selection = JSONSelection::parse( + "id + name + username + email + address { + street + suite + city + zipcode + geo { + lat + lng + } + } + phone + website + company { + name + catchPhrase + bs + }", + ) + .unwrap(); + + visitor + .walk(selection.next_subselection().cloned().unwrap()) + .unwrap(); + assert_snapshot!(print_visited(visited), @r###" + address + | city + | geo + | | lat + | | lng + | street + | suite + | zipcode + company + | bs + | catchPhrase + | name + email + id + name + phone + username + website + "###); + // let iter = selection.iter(); + // assert_debug_snapshot!(iter.collect_vec()); + } +} diff --git a/apollo-federation/src/sources/connect/expand/visitors/selection.rs b/apollo-federation/src/sources/connect/expand/visitors/selection.rs new file mode 100644 index 0000000000..18bc549f81 --- /dev/null +++ b/apollo-federation/src/sources/connect/expand/visitors/selection.rs @@ -0,0 +1,201 @@ +use std::ops::Deref; + +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::EnumType; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::schema::ScalarType; +use apollo_compiler::Name; +use apollo_compiler::Node; +use indexmap::IndexMap; +use itertools::Itertools; + +use super::filter_directives; +use super::try_insert; +use super::try_pre_insert; +use super::FieldVisitor; +use super::GroupVisitor; +use super::SchemaVisitor; +use crate::error::FederationError; +use crate::schema::position::ObjectTypeDefinitionPosition; +use crate::schema::position::TypeDefinitionPosition; +use 
crate::sources::connect::json_selection::NamedSelection; +use crate::sources::connect::SubSelection; + +/// Type alias for JSONSelection group info +/// +/// A JSONSelection has subselections which do not have a way to lookup the parent subselection +/// nor the field name corresponding to that selection, so we need to keep the matching schema object +/// type when validating selections against concrete types. +pub(crate) type JSONSelectionGroup = (ObjectTypeDefinitionPosition, SubSelection); + +impl FieldVisitor for SchemaVisitor<'_, ObjectTypeDefinitionPosition, ObjectType> { + type Error = FederationError; + + fn visit<'a>(&mut self, field: NamedSelection) -> Result<(), Self::Error> { + let (definition, r#type) = self.type_stack.last_mut().ok_or(FederationError::internal( + "tried to visit a field in a group not yet entered", + ))?; + + // Get the type of the field so we know how to visit it + for field_name in field.names() { + let field_name = Name::new(field_name)?; + let field = definition + .field(field_name.clone()) + .get(self.original_schema.schema())?; + let field_type = self + .original_schema + .get_type(field.ty.inner_named_type().clone())?; + let extended_field_type = field_type.get(self.original_schema.schema())?; + + // We only need to care about the type of the field if it isn't built-in + if !extended_field_type.is_built_in() { + match field_type { + TypeDefinitionPosition::Scalar(scalar) => { + let def = scalar.get(self.original_schema.schema())?; + let def = ScalarType { + description: def.description.clone(), + name: def.name.clone(), + directives: filter_directives( + self.directive_deny_list, + &def.directives, + ), + }; + + try_pre_insert!(self.to_schema, scalar)?; + try_insert!(self.to_schema, scalar, Node::new(def))?; + } + TypeDefinitionPosition::Enum(r#enum) => { + let def = r#enum.get(self.original_schema.schema())?; + let def = EnumType { + description: def.description.clone(), + name: def.name.clone(), + directives: 
filter_directives( + self.directive_deny_list, + &def.directives, + ), + values: def.values.clone(), + }; + + try_pre_insert!(self.to_schema, r#enum)?; + try_insert!(self.to_schema, r#enum, Node::new(def))?; + } + + // This will be handled by the rest of the visitor + TypeDefinitionPosition::Object(_) => {} + + // These will be handled later + TypeDefinitionPosition::Union(_) => { + return Err(FederationError::internal( + "unions are not yet handled for expansion", + )) + } + + // Anything else is not supported + TypeDefinitionPosition::InputObject(input) => { + return Err(FederationError::internal(format!( + "expected field to be a leaf or object type, found: input {}", + input.type_name, + ))) + } + TypeDefinitionPosition::Interface(interface) => { + return Err(FederationError::internal(format!( + "expected field to be a leaf or object type, found: interface {}", + interface.type_name, + ))) + } + }; + } + + // Add the field to the currently processing object, making sure to not overwrite if it already + // exists (and verify that we didn't change the type) + let new_field = FieldDefinition { + description: field.description.clone(), + name: field.name.clone(), + arguments: field.arguments.clone(), + ty: field.ty.clone(), + directives: filter_directives(self.directive_deny_list, &field.directives), + }; + if let Some(old_field) = r#type.fields.get(&field_name) { + if *old_field.deref().deref() != new_field { + return Err(FederationError::internal( + format!( "tried to write field to existing type, but field type was different. 
expected {new_field:?} found {old_field:?}"), + )); + } + } else { + r#type.fields.insert(field_name, Component::new(new_field)); + } + } + + Ok(()) + } +} + +impl GroupVisitor + for SchemaVisitor<'_, ObjectTypeDefinitionPosition, ObjectType> +{ + fn try_get_group_for_field( + &self, + field: &NamedSelection, + ) -> Result, FederationError> { + let (definition, _) = self.type_stack.last().ok_or(FederationError::internal( + "tried to get fields on a group not yet visited", + ))?; + + match field.names().first() { + Some(field_name) => { + let field_name = Name::new(field_name)?; + let field_type_name = definition + .field(field_name) + .get(self.original_schema.schema())? + .ty + .inner_named_type(); + + let TypeDefinitionPosition::Object(field_type) = + self.original_schema.get_type(field_type_name.clone())? + else { + return Ok(None); + }; + + Ok(field.next_subselection().cloned().map(|s| (field_type, s))) + } + None => Ok(None), + } + } + + fn enter_group( + &mut self, + (group_type, group): &JSONSelectionGroup, + ) -> Result, FederationError> { + try_pre_insert!(self.to_schema, group_type)?; + let def = group_type.get(self.original_schema.schema())?; + + let sub_type = ObjectType { + description: def.description.clone(), + name: def.name.clone(), + implements_interfaces: def.implements_interfaces.clone(), + directives: filter_directives(self.directive_deny_list, &def.directives), + fields: IndexMap::with_hasher(Default::default()), // Will be filled in by the `visit` method for each field + }; + + self.type_stack.push((group_type.clone(), sub_type)); + Ok(group + .selections_iter() + .sorted_by_key(|s| s.names()) + .cloned() + .collect()) + } + + fn exit_group(&mut self) -> Result<(), FederationError> { + let (definition, r#type) = self.type_stack.pop().ok_or(FederationError::internal( + "tried to exit a group not yet entered", + ))?; + + try_insert!(self.to_schema, definition, Node::new(r#type)) + } +} + +#[cfg(test)] +mod tests { + // TODO: Write these tests 
+} diff --git a/apollo-federation/src/sources/connect/header.rs b/apollo-federation/src/sources/connect/header.rs new file mode 100644 index 0000000000..99f85409e7 --- /dev/null +++ b/apollo-federation/src/sources/connect/header.rs @@ -0,0 +1,465 @@ +//! Headers defined in connectors `@source` and `@connect` directives. + +use std::ops::Deref; +use std::str::FromStr; + +use apollo_compiler::collections::IndexMap; +use serde_json_bytes::Value; + +use crate::sources::connect::string_template; +use crate::sources::connect::string_template::StringTemplate; + +#[derive(Clone, Debug)] +pub struct HeaderValue(StringTemplate); + +impl HeaderValue { + /// Evaluate expressions in the header value. + /// + /// # Errors + /// + /// Returns an error any expression can't be evaluated, or evaluates to an unsupported type. + pub fn interpolate(&self, vars: &IndexMap) -> Result { + let mut result = Vec::new(); + for part in &self.parts { + let value = part.interpolate(vars).map_err(|err| err.message)?; + result.extend(value.as_bytes()); + } + http::HeaderValue::from_bytes(&result).map_err(|e| e.to_string()) + } +} + +impl Deref for HeaderValue { + type Target = StringTemplate; + + fn deref(&self) -> &Self::Target { + &self.0 + } +} + +impl FromStr for HeaderValue { + type Err = string_template::Error; + + fn from_str(s: &str) -> Result { + StringTemplate::parse(s, 0).map(HeaderValue) + } +} + +#[cfg(test)] +mod test_header_value_parse { + use insta::assert_debug_snapshot; + + use super::*; + + #[test] + fn simple_constant() { + assert_debug_snapshot!( + HeaderValue::from_str("text"), + @r###" + Ok( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "text", + location: 0..4, + }, + ), + ], + }, + ), + ) + "### + ); + } + #[test] + fn simple_expression() { + assert_debug_snapshot!( + HeaderValue::from_str("{$config.one}"), + @r###" + Ok( + HeaderValue( + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: 
WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Empty, + range: Some( + 11..11, + ), + }, + ), + range: Some( + 7..11, + ), + }, + ), + range: Some( + 0..11, + ), + }, + }, + ), + location: 1..12, + }, + ), + ], + }, + ), + ) + "### + ); + } + #[test] + fn mixed_constant_and_expression() { + assert_debug_snapshot!( + HeaderValue::from_str("text{$config.one}text"), + @r###" + Ok( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "text", + location: 0..4, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Empty, + range: Some( + 11..11, + ), + }, + ), + range: Some( + 7..11, + ), + }, + ), + range: Some( + 0..11, + ), + }, + }, + ), + location: 5..16, + }, + ), + Constant( + Constant { + value: "text", + location: 17..21, + }, + ), + ], + }, + ), + ) + "### + ); + } + + #[test] + fn expressions_with_nested_braces() { + assert_debug_snapshot!( + HeaderValue::from_str("const{$config.one { two { three } }}another-const"), + @r###" + Ok( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "const", + location: 0..5, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "two", + ), + range: Some( + 14..17, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + 
WithRange { + node: Field( + "three", + ), + range: Some( + 20..25, + ), + }, + None, + ), + ], + range: Some( + 18..27, + ), + }, + ), + ), + ], + range: Some( + 12..29, + ), + }, + ), + range: Some( + 12..29, + ), + }, + ), + range: Some( + 7..29, + ), + }, + ), + range: Some( + 0..29, + ), + }, + }, + ), + location: 6..35, + }, + ), + Constant( + Constant { + value: "another-const", + location: 36..49, + }, + ), + ], + }, + ), + ) + "### + ); + } + + #[test] + fn invalid_header_values() { + assert_debug_snapshot!( + HeaderValue::from_str("\n"), + @r###" + Err( + Error { + message: "invalid value `\n`", + location: 0..1, + }, + ) + "### + ) + } + + #[test] + fn missing_closing_braces() { + assert_debug_snapshot!( + HeaderValue::from_str("{$config.one"), + @r###" + Err( + Error { + message: "Invalid expression, missing closing }", + location: 0..12, + }, + ) + "### + ) + } +} + +#[cfg(test)] +mod test_interpolate { + use insta::assert_debug_snapshot; + use pretty_assertions::assert_eq; + use serde_json_bytes::json; + + use super::*; + #[test] + fn test_interpolate() { + let value = HeaderValue::from_str("before {$config.one} after").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": "foo"})); + assert_eq!( + value.interpolate(&vars), + Ok(http::HeaderValue::from_static("before foo after")) + ); + } + + #[test] + fn test_interpolate_missing_value() { + let value = HeaderValue::from_str("{$config.one}").unwrap(); + let vars = IndexMap::default(); + assert_eq!( + value.interpolate(&vars), + Ok(http::HeaderValue::from_static("")) + ); + } + + #[test] + fn test_interpolate_value_array() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": ["one", "two"]})); + assert_eq!( + header_value.interpolate(&vars), + Err("Expressions can't evaluate to arrays or objects.".to_string()) + ); + } + + #[test] + fn 
test_interpolate_value_bool() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": true})); + assert_eq!( + Ok(http::HeaderValue::from_static("true")), + header_value.interpolate(&vars) + ); + } + + #[test] + fn test_interpolate_value_null() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": null})); + assert_eq!( + Ok(http::HeaderValue::from_static("")), + header_value.interpolate(&vars) + ); + } + + #[test] + fn test_interpolate_value_number() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": 1})); + assert_eq!( + Ok(http::HeaderValue::from_static("1")), + header_value.interpolate(&vars) + ); + } + + #[test] + fn test_interpolate_value_object() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": {}})); + assert_debug_snapshot!( + header_value.interpolate(&vars), + @r###" + Err( + "Expressions can't evaluate to arrays or objects.", + ) + "### + ); + } + + #[test] + fn test_interpolate_value_string() { + let header_value = HeaderValue::from_str("{$config.one}").unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": "string"})); + assert_eq!( + Ok(http::HeaderValue::from_static("string")), + header_value.interpolate(&vars) + ); + } +} + +#[cfg(test)] +mod test_get_expressions { + use super::*; + + #[test] + fn test_variable_references() { + let value = + HeaderValue::from_str("a {$this.a.b.c} b {$args.a.b.c} c {$config.a.b.c}").unwrap(); + let references: Vec<_> = value + .expressions() + .map(|e| e.expression.to_string()) + .collect(); + assert_eq!( + references, + vec!["$this.a.b.c", 
"$args.a.b.c", "$config.a.b.c"] + ); + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/README.md b/apollo-federation/src/sources/connect/json_selection/README.md index f194782395..12416ff44a 100644 --- a/apollo-federation/src/sources/connect/json_selection/README.md +++ b/apollo-federation/src/sources/connect/json_selection/README.md @@ -55,7 +55,16 @@ improvements, we should adhere to the following principles: avoided because it limits the developer's ability to subselect fields of the opaque `JSON` value in GraphQL operations. -3. Backwards compatibility should be maintained as we release new versions of +3. `JSONSelection` syntax may be _subsetted_ arbitrarily, either by generating a + reduced `JSONSelection` that serves the needs of a particular GraphQL + operation, or by skipping unneeded selections during `ApplyTo` execution. + When this subsetting happens, it would be highly undesirable for the behavior + of the remaining selections to change unexpectedly. Equivalently, but in the + other direction, `JSONSelection` syntax should always be _composable_, in the + sense that two `NamedSelection` items should continue to work as before when + used together in the same `SubSelection`. + +4. Backwards compatibility should be maintained as we release new versions of the `JSONSelection` syntax along with new versions of the (forthcoming) `@link(url: "https://specs.apollo.dev/connect/vX.Y")` specification. Wherever possible, we should only add new functionality, not remove or change existing @@ -72,34 +81,34 @@ worry if it doesn't seem helpful yet, as every rule will be explained in detail below. ```ebnf -JSONSelection ::= NakedSubSelection | PathSelection -NakedSubSelection ::= NamedSelection* StarSelection? 
-SubSelection ::= "{" NakedSubSelection "}" -NamedSelection ::= NamedPathSelection | NamedFieldSelection | NamedQuotedSelection | NamedGroupSelection +JSONSelection ::= PathSelection | NamedSelection* +SubSelection ::= "{" NamedSelection* "}" +NamedSelection ::= NamedPathSelection | PathWithSubSelection | NamedFieldSelection | NamedGroupSelection NamedPathSelection ::= Alias PathSelection -NamedFieldSelection ::= Alias? Identifier SubSelection? -NamedQuotedSelection ::= Alias StringLiteral SubSelection? +NamedFieldSelection ::= Alias? Key SubSelection? NamedGroupSelection ::= Alias SubSelection -Alias ::= Identifier ":" -PathSelection ::= (VarPath | KeyPath) SubSelection? +Alias ::= Key ":" +Path ::= VarPath | KeyPath | AtPath | ExprPath +PathSelection ::= Path SubSelection? +PathWithSubSelection ::= Path SubSelection VarPath ::= "$" (NO_SPACE Identifier)? PathStep* KeyPath ::= Key PathStep+ +AtPath ::= "@" PathStep* +ExprPath ::= "$(" LitExpr ")" PathStep* PathStep ::= "." Key | "->" Identifier MethodArgs? -Key ::= Identifier | StringLiteral +Key ::= Identifier | LitString Identifier ::= [a-zA-Z_] NO_SPACE [0-9a-zA-Z_]* -StringLiteral ::= "'" ("\\'" | [^'])* "'" | '"' ('\\"' | [^"])* '"' -MethodArgs ::= "(" (JSLiteral ("," JSLiteral)*)? ")" -JSLiteral ::= JSPrimitive | JSObject | JSArray | PathSelection -JSPrimitive ::= StringLiteral | JSNumber | "true" | "false" | "null" -JSNumber ::= "-"? (UnsignedInt ("." [0-9]*)? | "." [0-9]+) -UnsignedInt ::= "0" | [1-9] NO_SPACE [0-9]* -JSObject ::= "{" (JSProperty ("," JSProperty)*)? "}" -JSProperty ::= Key ":" JSLiteral -JSArray ::= "[" (JSLiteral ("," JSLiteral)*)? "]" -StarSelection ::= Alias? "*" SubSelection? +MethodArgs ::= "(" (LitExpr ("," LitExpr)* ","?)? ")" +LitExpr ::= LitPrimitive | LitObject | LitArray | PathSelection +LitPrimitive ::= LitString | LitNumber | "true" | "false" | "null" +LitString ::= "'" ("\\'" | [^'])* "'" | '"' ('\\"' | [^"])* '"' +LitNumber ::= "-"? ([0-9]+ ("." [0-9]*)? | "." 
[0-9]+) +LitObject ::= "{" (LitProperty ("," LitProperty)* ","?)? "}" +LitProperty ::= Key ":" LitExpr +LitArray ::= "[" (LitExpr ("," LitExpr)* ","?)? "]" NO_SPACE ::= !SpacesOrComments SpacesOrComments ::= (Spaces | Comment)+ -Spaces ::= (" " | "\t" | "\r" | "\n")+ +Spaces ::= ("⎵" | "\t" | "\r" | "\n")+ Comment ::= "#" [^\n]* ``` @@ -153,13 +162,12 @@ in a few key places: ```ebnf VarPath ::= "$" (NO_SPACE Identifier)? PathStep* Identifier ::= [a-zA-Z_] NO_SPACE [0-9a-zA-Z_]* -UnsignedInt ::= "0" | [1-9] NO_SPACE [0-9]* ``` These rules mean the `$` of a `$variable` cannot be separated from the identifier part (so `$ var` is invalid), and the first character of a -multi-character `Identifier` or `UnsignedInt` must not be separated from the -remaining characters. +multi-character `Identifier` must not be separated from the remaining +characters. Make sure you use `spaces_or_comments` generously when modifying or adding to the grammar implementation, or parsing may fail in cryptic ways when the input @@ -170,7 +178,7 @@ contains seemingly harmless whitespace or comment characters. Since the `JSONSelection` syntax is meant to be embedded within GraphQL string literals, and GraphQL shares the same `'...'` and `"..."` string literal syntax as `JSONSelection`, it can be visually confusing to embed a `JSONSelection` -string literal (denoted by the `StringLiteral` non-terminal) within a GraphQL +string literal (denoted by the `LitString` non-terminal) within a GraphQL string. Fortunately, GraphQL also supports multi-line string literals, delimited by @@ -211,39 +219,24 @@ feel free to take your time and enjoy the journey. The `JSONSelection` non-terminal is the top-level entry point for the grammar, and appears nowhere else within the rest of the grammar. It can be either a -`NakedSubSelection` (for selecting multiple named items) or a `PathSelection` -(for selecting a single anonymous value from a given path). 
When the -`PathSelection` option is chosen at this level, the entire `JSONSelection` must -be that single path, without any other named selections. +`PathSelection` (for selecting a single anonymous value from a given path) or a +`NamedSelection*` (for selecting zero or more named items). -### `NakedSubSelection ::=` - -![NakedSubSelection](./grammar/NakedSubSelection.svg) - -A `NakedSubSelection` is a `SubSelection` without the surrounding `{` and `}` -braces. It can appear at the top level of a `JSONSelection`, but otherwise -appears only as part of the `SubSelection` rule, meaning it must have braces -everywhere except at the top level. - -Because a `NakedSubSelection` can contain any number of `NamedSelection` items -(including zero), and may have no `StarSelection`, it's possible for the -`NakedSelection` to be fully empty. In these unusual cases, whitespace and -comments are still allowed, and the result of the selection will always be an -empty object. - -In the Rust implementation, there is no dedicated `NakedSubSelection` struct, as -we use the `SubSelection` struct to represent the meaningful contents of the -selection, regardless of whether it has braces. The `NakedSubSelection` -non-terminal is just a grammatical convenience, to avoid repetition between -`JSONSelection` and `SubSelection`. +When the `PathSelection` syntax is chosen at this level, and the path does not +have a trailing `SubSelection` (which ensures the result is an object with +statically known properties), the entire `JSONSelection` must be that single +path, without any other named selections. If the `PathSelection` does have a +trailing `SubSelection`, it may be mixed together with other named selections, +though in that case it will be parsed as a `PathWithSubSelection` within a +`SubSelection`, instead of a standalone `PathSelection`. 
### `SubSelection ::=` ![SubSelection](./grammar/SubSelection.svg) -A `SubSelection` is a `NakedSubSelection` surrounded by `{` and `}`, and is used -to select specific properties from the preceding object, much like a nested -selection set in a GraphQL operation. +A `SubSelection` is a sequence of zero or more `NamedSelection` items surrounded +by `{` and `}`, and is used to select specific properties from the preceding +object, much like a nested selection set in a GraphQL operation. Note that `SubSelection` may appear recursively within itself, as part of one of the various `NamedSelection` rules. This recursion allows for arbitrarily deep @@ -253,9 +246,9 @@ nesting of selections, which is necessary to handle complex JSON structures. ![NamedSelection](./grammar/NamedSelection.svg) -Every possible production of the `NamedSelection` non-terminal corresponds to a -named property in the output object, though each one obtains its value from the -input object in a slightly different way. +Every production of the `NamedSelection` non-terminal adds named properties to +the output object, though they obtain their properties/values from the input +object in different ways. ### `NamedPathSelection ::=` @@ -263,65 +256,64 @@ input object in a slightly different way. Since `PathSelection` returns an anonymous value extracted from the given path, if you want to use a `PathSelection` alongside other `NamedSelection` items, you -have to prefix it with an `Alias`, turning it into a `NamedPathSelection`. +can either prefix it with an `Alias` or ensure the path has a trailing +`SubSelection` whose output fields will be merged into the larger selection set. 
+ +For example, the `abc:` alias in this example causes the `{ a b c }` object +selected from `some.nested.path` to be nested under an `abc` output key: + +```graphql +id +name +abc: some.nested.path { a b c } +``` + +This selection produces an output object with keys `id`, `name`, and `abc`, +where `abc` is an object with keys `a`, `b`, and `c`. -For example, you cannot omit the `pathName:` alias in the following -`NakedSubSelection`, because `some.nested.path` has no output name by itself: +The `Alias`-free version is useful when you want to merge the output fields of a +path selection as siblings of other fields in a larger selection set: ```graphql -position { x y } -pathName: some.nested.path { a b c } -scalarField +id +name +some.nested.path { a b c } ``` -The ordering of alternatives in the `NamedSelection` rule is important, so the -`NamedPathSelection` alternative can be considered before `NamedFieldSelection` -and `NamedQuotedSelection`, because a `NamedPathSelection` such as `pathName: -some.nested.path` has a prefix that looks like a `NamedFieldSelection`: -`pathName: some`, causing an error when the parser encounters the remaining -`.nested.path` text. Some parsers would resolve this ambiguity by forbidding `.` -in the lookahead for `Named{Field,Quoted}Selection`, but negative lookahead is -tricky for this parser (see similar discussion regarding `NO_SPACE`), so instead -we greedily parse `NamedPathSelection` first, when possible, since that ensures -the whole path will be consumed. +This produces an output object with keys `id`, `name`, `a`, `b`, and `c`, all at +the same level, rather than grouping them under the `abc` alias. 
+ +#### Related syntax: `PathWithSubSelection` + +You can also flatten the `{ a b c }` properties to the top level by omitting the +alias, but this syntax parses as a `PathWithSubSelection` instead of a +`NamedPathSelection`: + +```graphql +id +name +some.nested.path { a b c } +``` + +This selection produces an output object with keys `id`, `name`, `a`, `b`, and +`c`. Additionally, `some.nested.path` must evaluate to a single object, rather +than an array of objects or a scalar value, which is not a limitation of the +`NamedPathSelection` syntax. ### `NamedFieldSelection ::=` ![NamedFieldSelection](./grammar/NamedFieldSelection.svg) The `NamedFieldSelection` non-terminal is the option most closely resembling -GraphQL field selections, where the field name must be an `Identifier`, may have -an `Alias`, and may have a `SubSelection` to select nested properties (which -requires the field's value to be an object rather than a scalar). +GraphQL field selections, where the field name must be a `Key` (`Identifier` or +quoted string literal), may have an `Alias`, and may have a `SubSelection` to +select nested properties (assuming the field's value is an object). In practice, whitespace is often required to keep multiple consecutive `NamedFieldSelection` identifiers separate, but is not strictly necessary when there is no ambiguity, as when an identifier follows a preceding subselection: `a{b}c`. -### `NamedQuotedSelection ::=` - -![NamedQuotedSelection](./grammar/NamedQuotedSelection.svg) - -Since arbitrary JSON objects can have properties that are not identifiers, we -need a version of `NamedFieldSelection` that allows for quoted property names as -opposed to identifiers. 
- -However, since our goal is always to produce an output that is safe for GraphQL -consumption, an `Alias` is strictly required in this case, and it must be a -valid GraphQL `Identifier`: - -```graphql -first -second: "second property" { x y z } -third { a b } -``` - -Besides extracting the `first` and `third` fields in typical GraphQL fashion, -this selection extracts the `second property` field as `second`, subselecting -`x`, `y`, and `z` from the extracted object. The final object will have the -properties `first`, `second`, and `third`. - ### `NamedGroupSelection ::=` ![NamedGroupSelection](./grammar/NamedGroupSelection.svg) @@ -370,22 +362,29 @@ Analogous to a GraphQL alias, the `Alias` syntax allows for renaming properties from the input JSON to match the desired output shape. In addition to renaming, `Alias` can provide names to otherwise anonymous -structures, such as those selected by `PathSelection`, `NamedGroupSelection`, or -`StarSelection` syntax. +structures, such as those selected by `PathSelection` or `NamedGroupSelection`. + +### `Path ::=` + +![Path](./grammar/Path.svg) -Because we always want to generate GraphQL-safe output properties, an `Alias` -must be a valid GraphQL identifier, rather than a quoted string. +A `Path` is a `VarPath`, `KeyPath`, `AtPath`, or `ExprPath`, which forms the +prefix of both `PathSelection` and `PathWithSubSelection`. + +In the Rust implementation, there is no separate `Path` struct, as we represent +both `PathSelection` and `PathWithSubSelection` using the `PathSelection` struct +and `PathList` enum. The `Path` non-terminal is just a grammatical convenience, +to avoid repetition between `PathSelection` and `PathWithSubSelection`. ### `PathSelection ::=` ![PathSelection](./grammar/PathSelection.svg) -A `PathSelection` is a `VarPath` or `KeyPath` followed by an optional -`SubSelection`. 
The purpose of a `PathSelection` is to extract a single -anonymous value from the input JSON, without preserving the nested structure of -the keys along the path. +A `PathSelection` is a `Path` followed by an optional `SubSelection`. The +purpose of a `PathSelection` is to extract a single anonymous value from the +input JSON, without preserving the nested structure of the keys along the path. -Since properties along the path may be either `Identifier` or `StringLiteral` +Since properties along the path may be either `Identifier` or `LitString` values, you are not limited to selecting only properties that are valid GraphQL field names, e.g. `myID: people."Ben Newman".id`. This is a slight departure from JavaScript syntax, which would use `people["Ben Newman"].id` to achieve the @@ -408,7 +407,7 @@ type Query { ``` If you need to select other named properties, you can still use a -`PathSelection` as part of a `NakedSubSelection`, as long as you give it an +`PathSelection` within a `NamedSelection*` sequence, as long as you give it an `Alias`: ```graphql @@ -425,6 +424,43 @@ type Query { } ``` +### `PathWithSubSelection ::=` + +![PathWithSubSelection](./grammar/PathWithSubSelection.svg) + +Although you can precede a `PathSelection` with an `Alias` to make it a +`NamedPathSelection`, this syntax has the effect of grouping the value of the +path under a single output key. 
+ +If you want to select multiple properties from the same object-valued `Path`, +you can use a `PathWithSubSelection`, which does not require an `Alias` but does +require a trailing `SubSelection` to specify which properties to select: + +```graphql +id +created +model + +# The { role content } SubSelection is mandatory so the output keys +# can be statically determined: +choices->first.message { role content } + +# Multiple PathWithSubSelections are allowed in the same SubSelection: +choices->last.message { lastContent: content } +``` + +This selection results in an output object with the keys `id`, `created`, +`model`, `role`, `content`, and `lastContent` all at the top level. + +Since the final `PathWithSubSelection` selects only one property, it is +equivalent to `lastContent: choices->last.message.content`, which may be +slightly easier to read, since the output property appears first. + +As with `PathSelection`, if `choices->first.message` happens not to select an +object, `choices->first.message { role content }` will result in a runtime +error, because the `role` and `content` properties cannot be selected from a +non-object value. + ### `VarPath ::=` ![VarPath](./grammar/VarPath.svg) @@ -472,8 +508,7 @@ type User @key(fields: "id") { ``` In addition to variables like `$this` and `$args`, a special `$` variable is -always bound to the current value being processed, which allows you to transform -input data that looks like this +always bound to the value received by the closest enclosing `SubSelection`, which allows you to transform input data that looks like this ```json { @@ -544,7 +579,7 @@ $.data { id name } This will produce a single object with `id` and `name` fields, without the enclosing `data` property. 
Equivalently, you could manually unroll this example -to the following `NakedSubSelection`: +to the following `NamedSelection*` sequence: ```graphql id: data.id @@ -554,11 +589,108 @@ name: data.name In this case, the `$.` is no longer necessary because `data.id` and `data.name` are unambiguously `KeyPath` selections. -> For backwards compatibility with earlier versions of the `JSONSelection` -syntax that did not support the `$` variable, you can also use a leading `.` -character (so `.data { id name }`, or even `.data.id` or `.data.name`) to mean -the same thing as `$.`, but this is no longer recommended, since `.data` is easy -to mistype and misread, compared to `$.data`. +### `AtPath ::=` + +![AtPath](./grammar/AtPath.svg) + +Similar to the special `$` variable, the `@` character always represents the +current value being processed, which is often equal to `$`, but may differ from +the `$` variable when `@` is used within the arguments of `->` methods. + +For example, when you want to compute the logical conjunction of several +properties of the current object, you can keep using `$` with different property +selections: + +```graphql +all: $.first->and($.second)->and($.third) +``` + +If the `$` variable were rebound to the input value received by the `->and` +method, this style of method chaining would not work, because the `$.second` +expression would attempt to select a `second` property from the value of +`$.first`. Instead, the `$` remains bound to the same value received by the +closest enclosing `{...}` selection set, or the root value when used at the top +level of a `JSONSelection`. 
+ +The `@` character becomes useful when you need to refer to the input value +received by a `->` method, as when using the `->echo` method to wrap a given +input value: + +```graphql +wrapped: field->echo({ fieldValue: @ }) +children: parent->echo([@.child1, @.child2, @.child3]) +``` + +The `->map` method has the special ability to apply its argument to each element +of its input array, so `@` will take on the value of each of those elements, +rather than referring to the array itself: + +```graphql +doubled: numbers->map({ value: @->mul(2) }) +types: values->map(@->typeof) +``` + +This special behavior of `@` within `->map` is available to any method +implementation, since method arguments are not evaluated before calling the +method, but are passed in as expressions that the method may choose to evaluate +(or even repeatedly reevaluate) however it chooses. + +### `ExprPath ::=` + +![ExprPath](./grammar/ExprPath.svg) + +Another syntax for beginning a `PathSelection` is the `ExprPath` rule, which is +a `LitExpr` enclosed by `$(...)`, followed by zero or more `PathStep` items. 
+
+This syntax is especially useful for embedding literal values, allowing:
+
+```graphql
+__typename: $("Product")
+condition: $(true)
+
+# Probably incorrect because "Product" and true parse as field names:
+# __typename: "Product"
+# condition: true
+
+# Best alternative option without ExprPath:
+# __typename: $->echo("Product")
+# condition: $->echo(true)
+```
+
+In addition to embedding a single value, this syntax also makes it easier to use
+a literal expression as the input value for a `.key` or `->method` application,
+as in
+
+```graphql
+alphabetSlice: $("abcdefghijklmnopqrstuvwxyz")->slice($args.start, $args.end)
+
+# Instead of using $->echo(...):
+# alphabetSlice: $->echo("abcdefghijklmnopqrstuvwxyz")->slice($args.start, $args.end)
+```
+
+The `->echo` method is still useful when you want to do something with the input
+value (which is bound to `@` within the echoed expression), rather than ignoring
+the input value (using `@` nowhere in the expression).
+
+The `$(...)` syntax can be useful within a `LitExpr` as well:
+
+```graphql
+# $(-1) needs wrapping in order to apply the ->mul method
+suffix: results->slice($(-1)->mul($args.suffixLength))
+
+# Instead of something like this:
+# suffix: results->slice($->echo(-1)->mul($args.suffixLength))
+```
+
+In fairness, due to the commutativity of multiplication, this particular case
+could have been written as `suffix: results->slice($args.suffixLength->mul(-1))`,
+but not all methods allow reversing the input and arguments so easily, and this
+syntax works in part because it still parenthesizes the `-1` literal value,
+forcing `LitExpr` parsing, much like the new `ExprPath` syntax.
+
+When you don't need to apply a `.key` or `->method` to a literal value within a
+`LitExpr`, you do not need to wrap it with `$(...)`, so the `ExprPath` syntax is
+relatively uncommon within `LitExpr` expressions.
 
 ### `PathStep ::=`
 
@@ -567,32 +699,93 @@ to mistype and misread, compared to `$.data`.
A `PathStep` is a single step along a `VarPath` or `KeyPath`, which can either select a nested key using `.` or invoke a method using `->`. -Keys selected using `.` can be either `Identifier` or `StringLiteral` names, but +Keys selected using `.` can be either `Identifier` or `LitString` names, but method names invoked using `->` must be `Identifier` names, and must be registered in the `JSONSelection` parser in order to be recognized. For the time being, only a fixed set of known methods are supported, though this list may grow and/or become user-configurable in the future: -> Full disclosure: even this list is still aspirational, but suggestive of the -> kinds of methods that are likely to be supported in the next version of the -> `JSONSelection` parser. - ```graphql -list->first { id name } -list->last.name -list->slice($args.start, $args.end) -list->reverse -some.value->times(2) -some.value->plus($addend) -some.value->minus(100) -some.value->div($divisor) -isDog: kind->eq("dog") -isNotCat: kind->neq("cat") -__typename: kind->match({ "dog": "Dog", "cat": "Cat" }) -decoded: utf8Bytes->decode("utf-8") -utf8Bytes: string->encode("utf-8") -encoded: bytes->encode("base64") +# The ->echo method returns its first input argument as-is, ignoring +# the input data. Useful for embedding literal values, as in +# $->echo("give me this string"), or wrapping the input value. +__typename: $->echo("Book") +wrapped: field->echo({ fieldValue: @ }) + +# Returns the type of the data as a string, e.g. "object", "array", +# "string", "number", "boolean", or "null". Note that `typeof null` is +# "object" in JavaScript but "null" for our purposes. +typeOfValue: value->typeof + +# When invoked against an array, ->map evaluates its first argument +# against each element of the array, binding the element values to `@`, +# and returns an array of the results. 
When invoked against a non-array,
+# ->map evaluates its first argument against that value and returns the
+# result without wrapping it in an array.
+doubled: numbers->map(@->mul(2))
+types: values->map(@->typeof)
+
+# Returns true if the data is deeply equal to the first argument, false
+# otherwise. Equality is solely value-based (all JSON), no references.
+isObject: value->typeof->eq("object")
+
+# Takes any number of pairs [candidate, value], and returns value for
+# the first candidate that equals the input data. If none of the
+# pairs match, a runtime error is reported, but a single-element
+# [<default>] array as the final argument guarantees a default value.
+__typename: kind->match(
+  ["dog", "Canine"],
+  ["cat", "Feline"],
+  ["Exotic"]
+)
+
+# Like ->match, but expects the first element of each pair to evaluate
+# to a boolean, returning the second element of the first pair whose
+# first element is true. This makes providing a final catch-all case
+# easy, since the last pair can be [true, <default>].
+__typename: kind->matchIf(
+  [@->eq("dog"), "Canine"],
+  [@->eq("cat"), "Feline"],
+  [true, "Exotic"]
+)
+
+# Arithmetic methods, supporting both integers and floating point values,
+# similar to JavaScript.
+sum: $.a->add($.b)->add($.c) +difference: $.a->sub($.b)->sub($.c) +product: $.a->mul($.b, $.c) +quotient: $.a->div($.b) +remainder: $.a->mod($.b) + +# Array/string methods +first: list->first +last: list->last +index3: list->get(3) +secondToLast: list->get(-2) +slice: list->slice(0, 5) +substring: string->slice(2, 5) +arraySize: array->size +stringLength: string->size + +# Object methods +aValue: $->echo({ a: 123 })->get("a") +hasKey: object->has("key") +hasAB: object->has("a")->and(object->has("b")) +numberOfProperties: object->size +keys: object->keys +values: object->values +entries: object->entries +keysFromEntries: object->entries.key +valuesFromEntries: object->entries.value + +# Logical methods +negation: $.condition->not +bangBang: $.condition->not->not +disjunction: $.a->or($.b)->or($.c) +conjunction: $.a->and($.b, $.c) +aImpliesB: $.a->not->or($.b) +excludedMiddle: $.toBe->or($.toBe->not)->eq(true) ``` ### `MethodArgs ::=` @@ -600,8 +793,8 @@ encoded: bytes->encode("base64") ![MethodArgs](./grammar/MethodArgs.svg) When a `PathStep` invokes an `->operator` method, the method invocation may -optionally take a sequence of comma-separated `JSLiteral` arguments in -parentheses, as in `list->slice(0, 5)` or `kilometers: miles->times(1.60934)`. +optionally take a sequence of comma-separated `LitExpr` arguments in +parentheses, as in `list->slice(0, 5)` or `kilometers: miles->mul(1.60934)`. Methods do not have to take arguments, as in `list->first` or `list->last`, which is why `MethodArgs` is optional in `PathStep`. @@ -611,7 +804,7 @@ which is why `MethodArgs` is optional in `PathStep`. ![Key](./grammar/Key.svg) A property name occurring along a dotted `PathSelection`, either an `Identifier` -or a `StringLiteral`. +or a `LitString`. ### `Identifier ::=` @@ -624,9 +817,33 @@ In some languages, identifiers can include `$` characters, but `JSONSelection` syntax aims to match GraphQL grammar, which does not allow `$` in field names. 
Instead, the `$` is reserved for denoting variables in `VarPath` selections. -### `StringLiteral ::=` +### `LitExpr ::=` -![StringLiteral](./grammar/StringLiteral.svg) +![LitExpr](./grammar/LitExpr.svg) + +A `LitExpr` (short for _literal expression_) represents a JSON-like value that +can be passed inline as part of `MethodArgs`. + +The `LitExpr` mini-language diverges from JSON by allowing symbolic +`PathSelection` values (which may refer to variables or fields) in addition to +the usual JSON primitives. This allows `->` methods to be parameterized in +powerful ways, e.g. `page: list->slice(0, $limit)`. + +Also, as a minor syntactic convenience, `LitObject` literals can have +`Identifier` or `LitString` keys, whereas JSON objects can have only +double-quoted string literal keys. + +### `LitPrimitive ::=` + +![LitPrimitive](./grammar/LitPrimitive.svg) + +Analogous to a JSON primitive value, with the only differences being that +`LitNumber` does not currently support the exponential syntax, and `LitString` +values can be single-quoted as well as double-quoted. + +### `LitString ::=` + +![LitString](./grammar/LitString.svg) A string literal that can be single-quoted or double-quoted, and may contain any characters except the quote character that delimits the string. The backslash @@ -642,129 +859,41 @@ You can avoid most of the headaches of escaping by choosing your outer quote characters wisely. If your string contains many double quotes, use single quotes to delimit the string, and vice versa, as in JavaScript. -### `JSLiteral ::=` - -![JSLiteral](./grammar/JSLiteral.svg) - -A `JSLiteral` represents a JSON-like value that can be passed inline as part of -`MethodArgs`. - -The `JSLiteral` mini-language diverges from JSON by allowing symbolic -`PathSelection` values (which may refer to variables or fields) in addition to -the usual JSON primitives. This allows `->` methods to be parameterized in -powerful ways, e.g. `page: list->slice(0, $limit)`. 
- -Also, as a minor syntactic convenience, `JSObject` literals can have -`Identifier` or `StringLiteral` keys, whereas JSON objects can have only -double-quoted string literal keys. - -### `JSPrimitive ::=` - -![JSPrimitive](./grammar/JSPrimitive.svg) - -Analogous to a JSON primitive value, with the only differences being that -`JSNumber` does not currently support the exponential syntax, and -`StringLiteral` values can be single-quoted as well as double-quoted. - -### `JSNumber ::=` +### `LitNumber ::=` -![JSNumber](./grammar/JSNumber.svg) +![LitNumber](./grammar/LitNumber.svg) A numeric literal that is possibly negative and may contain a fractional component. The integer component is required unless a fractional component is present, and the fractional component can have zero digits when the integer component is present (as in `-123.`), but the fractional component must have at least one digit when there is no integer component, since `.` is not a valid -numeric literal by itself. Leading and trailing zeroes are essential for the -fractional component, but leading zeroes are disallowed for the integer -component, except when the integer component is exactly zero. - -### `UnsignedInt ::=` - -![UnsignedInt](./grammar/UnsignedInt.svg) +numeric literal by itself. -The integer component of a `JSNumber`, which must be either `0` or an integer -without any leading zeroes. +### `LitObject ::=` -### `JSObject ::=` +![LitObject](./grammar/LitObject.svg) -![JSObject](./grammar/JSObject.svg) - -A sequence of `JSProperty` items within curly braces, as in JavaScript. +A sequence of `LitProperty` items within curly braces, as in JavaScript. Trailing commas are not currently allowed, but could be supported in the future. -### `JSProperty ::=` +### `LitProperty ::=` -![JSProperty](./grammar/JSProperty.svg) +![LitProperty](./grammar/LitProperty.svg) -A key-value pair within a `JSObject`. Note that the `Key` may be either an -`Identifier` or a `StringLiteral`, as in JavaScript. 
This is a little different +A key-value pair within a `LitObject`. Note that the `Key` may be either an +`Identifier` or a `LitString`, as in JavaScript. This is a little different from JSON, which allows double-quoted strings only. -### `JSArray ::=` +### `LitArray ::=` -![JSArray](./grammar/JSArray.svg) +![LitArray](./grammar/LitArray.svg) -A list of `JSLiteral` items within square brackets, as in JavaScript. +A list of `LitExpr` items within square brackets, as in JavaScript. Trailing commas are not currently allowed, but could be supported in the future. -### `StarSelection ::=` - -![StarSelection](./grammar/StarSelection.svg) - -The `StarSelection` non-terminal is uncommon when working with GraphQL, since it -selects all remaining properties of an object, which can be difficult to -represent using static GraphQL types, without resorting to the catch-all `JSON` -scalar type. Still, a `StarSelection` can be useful for consuming JSON -dictionaries with dynamic keys, or for capturing unexpected properties for -debugging purposes. - -When used, a `StarSelection` must come after any `NamedSelection` items within a -given `NakedSubSelection`. - -A common use case for `StarSelection` is capturing all properties not otherwise -selected using a field called `allOtherFields`, which must have a generic `JSON` -type in the GraphQL schema: - -```graphql -knownField -anotherKnownField -allOtherFields: * -``` - -Note that `knownField` and `anotherKnownField` will not be included in the -`allOtherFields` output, since they are selected explicitly. In this sense, the -`*` functions a bit like object `...rest` syntax in JavaScript. 
- -If you happen to know these other fields all have certain properties, you can -restrict the `*` selection to just those properties: - -```graphql -knownField { id name } -allOtherFields: * { id } -``` - -Sometimes a REST API will return a dictionary result with an unknown set of -dynamic keys but values of some known type, such as a map of ISBN numbers to -`Book` objects: - -```graphql -booksByISBN: result.books { * { title author { name } } -``` - -Because the set of ISBN numbers is statically unknowable, the type of -`booksByISBN` would have to be `JSON` in the GraphQL schema, but it can still be -useful to select known properties from the `Book` objects within the -`result.books` dictionary, so you don't return more GraphQL data than necessary. - -The grammar technically allows a `StarSelection` with neither an `Alias` nor a -`SubSelection`, but this is not a useful construct from a GraphQL perspective, -since it provides no output fields that can be reliably typed by a GraphQL -schema. This form has some use cases when working with `JSONSelection` outside -of GraphQL, but they are not relevant here. - ### `NO_SPACE ::= !SpacesOrComments` The `NO_SPACE` non-terminal is used to enforce the absence of whitespace or diff --git a/apollo-federation/src/sources/connect/json_selection/apply_to.rs b/apollo-federation/src/sources/connect/json_selection/apply_to.rs index 5c76c91414..25b9838f41 100644 --- a/apollo-federation/src/sources/connect/json_selection/apply_to.rs +++ b/apollo-federation/src/sources/connect/json_selection/apply_to.rs @@ -1,151 +1,268 @@ /// ApplyTo is a trait for applying a JSONSelection to a JSON value, collecting /// any/all errors encountered in the process. 
 use std::hash::Hash;
-use std::hash::Hasher;
 
 use apollo_compiler::collections::IndexMap;
 use apollo_compiler::collections::IndexSet;
-use itertools::Itertools;
 use serde_json_bytes::json;
-use serde_json_bytes::Map;
+use serde_json_bytes::Map as JSONMap;
 use serde_json_bytes::Value as JSON;
+use shape::Shape;
+use shape::ShapeCase;
 
+use super::helpers::json_merge;
 use super::helpers::json_type_name;
+use super::immutable::InputPath;
+use super::known_var::KnownVariable;
+use super::lit_expr::LitExpr;
+use super::location::OffsetRange;
+use super::location::Ranged;
+use super::location::WithRange;
+use super::methods::ArrowMethod;
 use super::parser::*;
 
-pub trait ApplyTo {
+pub(super) type VarsWithPathsMap<'a> = IndexMap<KnownVariable, (&'a JSON, InputPath<JSON>)>;
+
+impl JSONSelection {
     // Applying a selection to a JSON value produces a new JSON value, along
     // with any/all errors encountered in the process. The value is represented
     // as an Option to allow for undefined/missing values (which JSON does not
     // explicitly support), which are distinct from null values (which it does
     // support).
-    fn apply_to(&self, data: &JSON) -> (Option<JSON>, Vec<ApplyToError>) {
+    pub fn apply_to(&self, data: &JSON) -> (Option<JSON>, Vec<ApplyToError>) {
         self.apply_with_vars(data, &IndexMap::default())
     }
 
-    fn apply_with_vars(
+    pub fn apply_with_vars(
         &self,
         data: &JSON,
         vars: &IndexMap<String, JSON>,
     ) -> (Option<JSON>, Vec<ApplyToError>) {
-        let mut input_path = vec![];
         // Using IndexSet over HashSet to preserve the order of the errors.
let mut errors = IndexSet::default(); - let value = self.apply_to_path(data, vars, &mut input_path, &mut errors); + + let mut vars_with_paths: VarsWithPathsMap = IndexMap::default(); + for (var_name, var_data) in vars { + if let Some(known_var) = KnownVariable::from_str(var_name.as_str()) { + vars_with_paths.insert( + known_var, + (var_data, InputPath::empty().append(json!(var_name))), + ); + } else { + errors.insert(ApplyToError::new( + format!("Unknown variable {}", var_name), + vec![json!(var_name)], + None, + )); + } + } + // The $ variable initially refers to the root data value, but is + // rebound by nested selection sets to refer to the root value the + // selection set was applied to. + vars_with_paths.insert(KnownVariable::Dollar, (data, InputPath::empty())); + + let (value, apply_errors) = self.apply_to_path(data, &vars_with_paths, &InputPath::empty()); + + // Since errors is an IndexSet, this line effectively deduplicates the + // errors, in an attempt to make them less verbose. However, now that we + // include both path and range information in the errors, there's an + // argument to be made that errors can no longer be meaningfully + // deduplicated, so we might consider sticking with a Vec. + errors.extend(apply_errors); + (value, errors.into_iter().collect()) } + pub fn shape(&self) -> Shape { + self.compute_output_shape( + // If we don't know anything about the shape of the input data, we + // can represent the data symbolically using the $root variable + // shape. Subproperties needed from this shape will show up as + // subpaths like $root.books.4.isbn in the output shape. + // + // While we do not currently have a $root variable available as a + // KnownVariable during apply_to_path execution, we might consider + // adding it, since it would align with the way we process other + // variable shapes. For now, $root exists only as a shape name that + // we are inventing right here. 
+ Shape::name("$root"), + // If we wanted to specify anything about the shape of the $root + // variable, we could define a shape for "$root" in this map. + &IndexMap::default(), + ) + } + + pub fn compute_output_shape( + &self, + input_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + match self { + Self::Named(selection) => selection.compute_output_shape( + input_shape.clone(), + input_shape.clone(), + named_var_shapes, + ), + Self::Path(path_selection) => path_selection.compute_output_shape( + input_shape.clone(), + input_shape.clone(), + named_var_shapes, + ), + } + } +} + +pub(super) trait ApplyToInternal { // This is the trait method that should be implemented and called // recursively by the various JSONSelection types. fn apply_to_path( &self, data: &JSON, - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option; + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec); // When array is encountered, the Self selection will be applied to each // element of the array, producing a new array. fn apply_to_array( &self, data_array: &[JSON], - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option { + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { let mut output = Vec::with_capacity(data_array.len()); + let mut errors = Vec::new(); for (i, element) in data_array.iter().enumerate() { - input_path.push(JSON::Number(i.into())); - let value = self.apply_to_path(element, vars, input_path, errors); - input_path.pop(); + let input_path_with_index = input_path.append(json!(i)); + let (applied, apply_errors) = self.apply_to_path(element, vars, &input_path_with_index); + errors.extend(apply_errors); // When building an Object, we can simply omit missing properties // and report an error, but when building an Array, we need to // insert null values to preserve the original array indices/length. 
- output.push(value.unwrap_or(JSON::Null)); + output.push(applied.unwrap_or(JSON::Null)); } - Some(JSON::Array(output)) + (Some(JSON::Array(output)), errors) } + + /// Computes the static output shape produced by a JSONSelection, by + /// traversing the selection AST, recursively calling `compute_output_shape` + /// on the current data/variable shapes at each level. + fn compute_output_shape( + &self, + // Shape of the `@` variable, which typically changes with each + // recursive call to compute_output_shape. + input_shape: Shape, + // Shape of the `$` variable, which is bound to the closest enclosing + // subselection object, or the root data object if there is no enclosing + // subselection. + dollar_shape: Shape, + // Shapes of other named variables, with the variable name `String` + // including the initial `$` character. This map typically does not + // change during the compute_output_shape recursion, and so can be + // passed down by immutable reference. + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape; } -#[derive(Debug, Eq, PartialEq, Clone)] -pub struct ApplyToError(JSON); - -impl Hash for ApplyToError { - fn hash(&self, hasher: &mut H) { - // Although serde_json::Value (aka JSON) does not implement the Hash - // trait, we can convert self.0 to a JSON string and hash that. To do - // this properly, we should ensure all object keys are serialized in - // lexicographic order before hashing, but the only object keys we use - // are "message" and "path", and they always appear in that order. 
- self.0.to_string().hash(hasher) - } +#[derive(Debug, Eq, PartialEq, Clone, Hash)] +pub struct ApplyToError { + message: String, + path: Vec, + range: OffsetRange, } impl ApplyToError { - fn new(message: &str, path: &[JSON]) -> Self { - Self(json!({ - "message": message, - "path": JSON::Array(path.to_vec()), - })) + pub(crate) fn new(message: String, path: Vec, range: OffsetRange) -> Self { + Self { + message, + path, + range, + } } // This macro is useful for tests, but it absolutely should never be used with // dynamic input at runtime, since it panics for any input that's not JSON. #[cfg(test)] - fn from_json(json: &JSON) -> Self { - if let JSON::Object(error) = json { - if let Some(JSON::String(message)) = error.get("message") { - if let Some(JSON::Array(path)) = error.get("path") { - if path - .iter() - .all(|element| matches!(element, JSON::String(_) | JSON::Number(_))) - { - // Instead of simply returning Self(json.clone()), we - // enforce that the "message" and "path" properties are - // always in that order, as promised in the comment in - // the hash method above. 
- return Self(json!({ - "message": message, - "path": path, - })); - } - } - } + pub(crate) fn from_json(json: &JSON) -> Self { + let error = json.as_object().unwrap(); + let message = error.get("message").unwrap().as_str().unwrap().to_string(); + let path = error.get("path").unwrap().as_array().unwrap().clone(); + let range = error.get("range").unwrap().as_array().unwrap(); + + Self { + message, + path, + range: if range.len() == 2 { + let start = range[0].as_u64().unwrap() as usize; + let end = range[1].as_u64().unwrap() as usize; + Some(start..end) + } else { + None + }, } - panic!("invalid ApplyToError JSON: {:?}", json); } - pub fn message(&self) -> Option<&str> { - self.0 - .as_object() - .and_then(|v| v.get("message")) - .and_then(|s| s.as_str()) + pub fn message(&self) -> &str { + self.message.as_str() + } + + pub fn path(&self) -> &[JSON] { + self.path.as_slice() + } + + pub fn range(&self) -> OffsetRange { + self.range.clone() + } +} + +// Rust doesn't allow implementing methods directly on tuples like +// (Option, Vec), so we define a trait to provide the +// methods we need, and implement the trait for the tuple in question. +pub(super) trait ApplyToResultMethods { + fn prepend_errors(self, errors: Vec) -> Self; + + fn and_then_collecting_errors( + self, + f: impl FnOnce(&JSON) -> (Option, Vec), + ) -> (Option, Vec); +} + +impl ApplyToResultMethods for (Option, Vec) { + // Intentionally taking ownership of self to avoid cloning, since we pretty + // much always use this method to replace the previous (value, errors) tuple + // before returning. 
+ fn prepend_errors(self, mut errors: Vec) -> Self { + if errors.is_empty() { + self + } else { + let (value_opt, apply_errors) = self; + errors.extend(apply_errors); + (value_opt, errors) + } } - pub fn path(&self) -> Option { - self.0 - .as_object() - .and_then(|v| v.get("path")) - .and_then(|p| p.as_array()) - .map(|l| l.iter().filter_map(|v| v.as_str()).join(".")) + // A substitute for Option<_>::and_then that accumulates errors behind the + // scenes. I'm no Haskell programmer, but this feels monadic? ¯\_(ツ)_/¯ + fn and_then_collecting_errors( + self, + f: impl FnOnce(&JSON) -> (Option, Vec), + ) -> (Option, Vec) { + match self { + (Some(data), errors) => f(&data).prepend_errors(errors), + (None, errors) => (None, errors), + } } } -impl ApplyTo for JSONSelection { +impl ApplyToInternal for JSONSelection { fn apply_to_path( &self, data: &JSON, - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option { - if let JSON::Array(array) = data { - return self.apply_to_array(array, vars, input_path, errors); - } - + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { match self { // Because we represent a JSONSelection::Named as a SubSelection, we // can fully delegate apply_to_path to SubSelection::apply_to_path. @@ -153,300 +270,662 @@ impl ApplyTo for JSONSelection { // could still delegate to SubSelection::apply_to_path, but we would // need to create a temporary SubSelection to wrap the selections // Vec. 
- Self::Named(named_selections) => { - named_selections.apply_to_path(data, vars, input_path, errors) + Self::Named(named_selections) => named_selections.apply_to_path(data, vars, input_path), + Self::Path(path_selection) => path_selection.apply_to_path(data, vars, input_path), + } + } + + fn compute_output_shape( + &self, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + match self { + Self::Named(selection) => { + selection.compute_output_shape(input_shape, dollar_shape, named_var_shapes) } Self::Path(path_selection) => { - path_selection.apply_to_path(data, vars, input_path, errors) + path_selection.compute_output_shape(input_shape, dollar_shape, named_var_shapes) } } } } -impl ApplyTo for NamedSelection { +impl ApplyToInternal for NamedSelection { fn apply_to_path( &self, data: &JSON, - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option { - if let JSON::Array(array) = data { - return self.apply_to_array(array, vars, input_path, errors); - } + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { + let mut output: Option = None; + let mut errors = Vec::new(); - let mut output = Map::new(); - - #[rustfmt::skip] // cargo fmt butchers this closure's formatting - let mut field_quoted_helper = | - alias: Option<&Alias>, - key: Key, - selection: &Option, - input_path: &mut Vec, - | { - input_path.push(key.to_json()); - let name = key.as_string(); - if let Some(child) = data.get(name.clone()) { - let output_name = alias.map_or(&name, |alias| &alias.name); - if let Some(selection) = selection { - let value = selection.apply_to_path(child, vars, input_path, errors); - if let Some(value) = value { - output.insert(output_name.clone(), value); + match self { + Self::Field(alias, key, selection) => { + let input_path_with_key = input_path.append(key.to_json()); + let name = key.as_str(); + if let Some(child) = data.get(name) { + let output_name = 
alias.as_ref().map_or(name, |alias| alias.name()); + if let Some(selection) = selection { + let (value, apply_errors) = + selection.apply_to_path(child, vars, &input_path_with_key); + errors.extend(apply_errors); + if let Some(value) = value { + output = Some(json!({ output_name: value })); + } + } else { + output = Some(json!({ output_name: child.clone() })); } } else { - output.insert(output_name.clone(), child.clone()); + errors.push(ApplyToError::new( + format!( + "Property {} not found in {}", + key.dotted(), + json_type_name(data), + ), + input_path_with_key.to_vec(), + key.range(), + )); + } + } + Self::Path { + alias, + path, + inline, + } => { + let (value_opt, apply_errors) = path.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + + if let Some(alias) = alias { + // Handle the NamedPathSelection case. + if let Some(value) = value_opt { + output = Some(json!({ alias.name(): value })); + } + } else if *inline { + match value_opt { + Some(JSON::Object(map)) => { + output = Some(JSON::Object(map.clone())); + } + Some(JSON::Null) => { + output = Some(JSON::Null); + } + Some(value) => { + errors.push(ApplyToError::new( + format!("Expected object or null, not {}", json_type_name(&value)), + input_path.to_vec(), + path.range(), + )); + } + None => { + errors.push(ApplyToError::new( + "Expected object or null, not nothing".to_string(), + input_path.to_vec(), + path.range(), + )); + } + } + } else { + errors.push(ApplyToError::new( + "Named path must have an alias, a trailing subselection, or be inlined with ... 
and produce an object or null".to_string(), + input_path.to_vec(), + path.range(), + )); + } + } + Self::Group(alias, sub_selection) => { + let (value_opt, apply_errors) = sub_selection.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + if let Some(value) = value_opt { + output = Some(json!({ alias.name(): value })); } - } else { - errors.insert(ApplyToError::new( - format!( - "Property {} not found in {}", - key.dotted(), - json_type_name(data), - ).as_str(), - input_path, - )); } - input_path.pop(); }; + (output, errors) + } + + fn compute_output_shape( + &self, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + let mut output = Shape::empty_map(); + match self { - Self::Field(alias, name, selection) => { - field_quoted_helper( - alias.as_ref(), - Key::Field(name.clone()), - selection, - input_path, - ); - } - Self::Quoted(alias, name, selection) => { - field_quoted_helper( - Some(alias), - Key::Quoted(name.clone()), - selection, - input_path, + Self::Field(alias_opt, key, selection) => { + let output_key = alias_opt + .as_ref() + .map_or(key.as_str(), |alias| alias.name()); + let field_shape = dollar_shape.field(key.as_str()); + output.insert( + output_key.to_string(), + if let Some(selection) = selection { + selection.compute_output_shape(field_shape, dollar_shape, named_var_shapes) + } else { + field_shape + }, ); } - Self::Path(alias, path_selection) => { - let value = path_selection.apply_to_path(data, vars, input_path, errors); - if let Some(value) = value { - output.insert(alias.name.clone(), value); + Self::Path { alias, path, .. 
} => { + let path_shape = + path.compute_output_shape(input_shape, dollar_shape, named_var_shapes); + if let Some(alias) = alias { + output.insert(alias.name().to_string(), path_shape); + } else { + return path_shape; } } Self::Group(alias, sub_selection) => { - let value = sub_selection.apply_to_path(data, vars, input_path, errors); - if let Some(value) = value { - output.insert(alias.name.clone(), value); - } + output.insert( + alias.name().to_string(), + sub_selection.compute_output_shape(input_shape, dollar_shape, named_var_shapes), + ); } }; - Some(JSON::Object(output)) + Shape::object(output, Shape::none()) } } -impl ApplyTo for PathSelection { +impl ApplyToInternal for PathSelection { fn apply_to_path( &self, data: &JSON, - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option { - if let JSON::Array(array) = data { - return self.apply_to_array(array, vars, input_path, errors); + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { + match (self.path.as_ref(), vars.get(&KnownVariable::Dollar)) { + // If this is a KeyPath, instead of using data as given, we need to + // evaluate the path starting from the current value of $. To evaluate + // the KeyPath against data, prefix it with @. This logic supports + // method chaining like obj->has('a')->and(obj->has('b')), where both + // obj references are interpreted as $.obj. + (PathList::Key(_, _), Some((dollar_data, dollar_path))) => { + self.path.apply_to_path(dollar_data, vars, dollar_path) + } + + // If $ is undefined for some reason, fall back to using data... + // TODO: Since $ should never be undefined, we might want to + // guarantee its existence at compile time, somehow. + // (PathList::Key(_, _), None) => todo!(), + _ => self.path.apply_to_path(data, vars, input_path), } + } - match self { - Self::Var(var_name, tail) => { - if var_name == "$" { - // Because $ refers to the current value, we keep using - // input_path instead of creating a new var_path here. 
- tail.apply_to_path(data, vars, input_path, errors) - } else if let Some(var_data) = vars.get(var_name) { - let mut var_path = vec![json!(var_name)]; - tail.apply_to_path(var_data, vars, &mut var_path, errors) + fn compute_output_shape( + &self, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + match self.path.as_ref() { + PathList::Key(_, _) => { + // If this is a KeyPath, we need to evaluate the path starting + // from the current $ shape, so we pass dollar_shape as the data + // *and* dollar_shape to self.path.compute_output_shape. + self.path.compute_output_shape( + dollar_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ) + } + // If this is not a KeyPath, keep evaluating against input_shape. + // This logic parallels PathSelection::apply_to_path (above). + _ => self + .path + .compute_output_shape(input_shape, dollar_shape, named_var_shapes), + } + } +} + +impl ApplyToInternal for WithRange { + fn apply_to_path( + &self, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { + match self.as_ref() { + PathList::Var(ranged_var_name, tail) => { + let var_name = ranged_var_name.as_ref(); + if var_name == &KnownVariable::AtSign { + // We represent @ as a variable name in PathList::Var, but + // it is never stored in the vars map, because it is always + // shorthand for the current data value. + tail.apply_to_path(data, vars, input_path) + } else if let Some((var_data, var_path)) = vars.get(var_name) { + // Variables are associated with a path, which is always + // just the variable name for named $variables other than $. + // For the special variable $, the path represents the + // sequence of keys from the root input data to the $ data. 
+ tail.apply_to_path(var_data, vars, var_path) } else { - errors.insert(ApplyToError::new( - format!("Variable {} not found", var_name).as_str(), - &[json!(var_name)], - )); - None + ( + None, + vec![ApplyToError::new( + format!("Variable {} not found", var_name.as_str()), + input_path.to_vec(), + ranged_var_name.range(), + )], + ) } } - Self::Key(key, tail) => { - input_path.push(key.to_json()); + PathList::Key(key, tail) => { + if let JSON::Array(array) = data { + return self.apply_to_array(array, vars, input_path); + } + + let input_path_with_key = input_path.append(key.to_json()); if !matches!(data, JSON::Object(_)) { - errors.insert(ApplyToError::new( - format!( - "Property {} not found in {}", - key.dotted(), - json_type_name(data), - ) - .as_str(), - input_path, - )); - input_path.pop(); - return None; + return ( + None, + vec![ApplyToError::new( + format!( + "Property {} not found in {}", + key.dotted(), + json_type_name(data), + ), + input_path_with_key.to_vec(), + key.range(), + )], + ); } - let result = if let Some(child) = match key { - Key::Field(name) => data.get(name), - Key::Quoted(name) => data.get(name), - Key::Index(index) => data.get(index), - } { - tail.apply_to_path(child, vars, input_path, errors) + if let Some(child) = data.get(key.as_str()) { + tail.apply_to_path(child, vars, &input_path_with_key) } else { - errors.insert(ApplyToError::new( - format!( - "Property {} not found in {}", - key.dotted(), - json_type_name(data), - ) - .as_str(), - input_path, - )); - None - }; - - input_path.pop(); - - result + ( + None, + vec![ApplyToError::new( + format!( + "Property {} not found in {}", + key.dotted(), + json_type_name(data), + ), + input_path_with_key.to_vec(), + key.range(), + )], + ) + } } - Self::Selection(selection) => { - // If data is not an object here, this recursive apply_to_path - // call will handle the error. 
- selection.apply_to_path(data, vars, input_path, errors) + PathList::Expr(expr, tail) => expr + .apply_to_path(data, vars, input_path) + .and_then_collecting_errors(|value| tail.apply_to_path(value, vars, input_path)), + PathList::Method(method_name, method_args, tail) => { + let method_path = + input_path.append(JSON::String(format!("->{}", method_name.as_ref()).into())); + + if let Some(method) = ArrowMethod::lookup(method_name) { + method.apply( + method_name, + method_args.as_ref(), + data, + vars, + &method_path, + tail, + ) + } else { + ( + None, + vec![ApplyToError::new( + format!("Method ->{} not found", method_name.as_ref()), + method_path.to_vec(), + method_name.range(), + )], + ) + } } - Self::Empty => { + PathList::Selection(selection) => selection.apply_to_path(data, vars, input_path), + PathList::Empty => { // If data is not an object here, we want to preserve its value // without an error. - Some(data.clone()) + (Some(data.clone()), vec![]) } } } -} -impl ApplyTo for SubSelection { - fn apply_to_path( + fn compute_output_shape( &self, - data: &JSON, - vars: &IndexMap, - input_path: &mut Vec, - errors: &mut IndexSet, - ) -> Option { - if let JSON::Array(array) = data { - return self.apply_to_array(array, vars, input_path, errors); - } - - let (data_map, data_really_primitive) = match data { - JSON::Object(data_map) => (data_map.clone(), false), - _primitive => (Map::new(), true), - }; + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + match self.as_ref() { + PathList::Var(ranged_var_name, tail) => { + let var_name = ranged_var_name.as_ref(); + let var_shape = if var_name == &KnownVariable::AtSign { + input_shape + } else if var_name == &KnownVariable::Dollar { + dollar_shape.clone() + } else if let Some(shape) = named_var_shapes.get(var_name.as_str()) { + shape.clone() + } else { + Shape::name(var_name.as_str()) + }; + tail.compute_output_shape(var_shape, dollar_shape, named_var_shapes) + } - let 
mut output = Map::new(); - let mut input_names = IndexSet::default(); + PathList::Key(key, rest) => { + // If this is the first key in the path, + // PathSelection::compute_output_shape will have set our + // input_shape equal to its dollar_shape, thereby ensuring that + // some.nested.path is equivalent to $.some.nested.path. + if input_shape.is_none() { + // Following WithRange::apply_to_path, we do not + // want to call rest.compute_output_shape recursively with + // an input data shape corresponding to missing data, though + // it might do the right thing. + return input_shape; + } - for named_selection in &self.selections { - let value = named_selection.apply_to_path(data, vars, input_path, errors); + if let ShapeCase::Array { prefix, tail } = input_shape.case() { + // Map rest.compute_output_shape over the prefix and rest + // elements of the array shape, so we don't have to map + // array shapes for the other PathList variants. + let mapped_prefix = prefix + .iter() + .map(|shape| { + if shape.is_none() { + shape.clone() + } else { + rest.compute_output_shape( + shape.field(key.as_str()), + dollar_shape.clone(), + named_var_shapes, + ) + } + }) + .collect::>(); + + let mapped_rest = if tail.is_none() { + tail.clone() + } else { + rest.compute_output_shape( + tail.field(key.as_str()), + dollar_shape.clone(), + named_var_shapes, + ) + }; - // If value is an object, extend output with its keys and their values. - if let Some(JSON::Object(key_and_value)) = value { - output.extend(key_and_value); + Shape::array(mapped_prefix, mapped_rest) + } else { + rest.compute_output_shape( + input_shape.field(key.as_str()), + dollar_shape.clone(), + named_var_shapes, + ) + } } - // If there is a star selection, we need to keep track of the - // *original* names of the fields that were explicitly selected, - // because we will need to omit them from what the * matches. 
- if self.star.is_some() { - match named_selection { - NamedSelection::Field(_, name, _) => { - input_names.insert(name.as_str()); - } - NamedSelection::Quoted(_, name, _) => { - input_names.insert(name.as_str()); - } - NamedSelection::Path(_, path_selection) => { - if let PathSelection::Key(key, _) = path_selection { - match key { - Key::Field(name) | Key::Quoted(name) => { - input_names.insert(name.as_str()); - } - // While Property::Index may be used to - // represent the input_path during apply_to_path - // when arrays are encountered, it will never be - // used to represent the parsed structure of any - // actual selection string, becase arrays are - // processed automatically/implicitly and their - // indices are never explicitly selected. This - // means the numeric Property::Index case cannot - // affect the keys selected by * selections, so - // input_names does not need updating here. - Key::Index(_) => {} - }; - } + PathList::Expr(expr, tail) => tail.compute_output_shape( + expr.compute_output_shape(input_shape, dollar_shape.clone(), named_var_shapes), + dollar_shape.clone(), + named_var_shapes, + ), + + PathList::Method(method_name, method_args, tail) => { + if input_shape.is_none() { + // Following WithRange::apply_to_path, we do not + // want to apply methods to missing input data. 
+ return input_shape; + } + + if let Some(method) = ArrowMethod::lookup(method_name) { + let method_result_shape = if let ShapeCase::One(cases) = input_shape.case() { + Shape::one(cases.iter().map(|case| { + self.compute_output_shape( + case.clone(), + dollar_shape.clone(), + named_var_shapes, + ) + })) + } else { + method.shape( + method_name, + method_args.as_ref(), + input_shape, + dollar_shape.clone(), + named_var_shapes, + ) + }; + + if method_result_shape.is_none() { + method_result_shape.clone() + } else { + tail.compute_output_shape( + method_result_shape, + dollar_shape.clone(), + named_var_shapes, + ) } - // The contents of groups do not affect the keys matched by - // * selections in the parent object (outside the group). - NamedSelection::Group(_, _) => {} - }; + } else { + let message = format!("Method ->{} not found", method_name.as_str()); + Shape::error_with_range(message.as_str(), method_name.range()) + } + } + + PathList::Selection(selection) => { + selection.compute_output_shape(input_shape, dollar_shape, named_var_shapes) } + + PathList::Empty => input_shape, } + } +} - match &self.star { - // Aliased but not subselected, e.g. 
"a b c rest: *" - Some(StarSelection(Some(alias), None)) => { - let mut star_output = Map::new(); - for (key, value) in &data_map { - if !input_names.contains(key.as_str()) { - star_output.insert(key.clone(), value.clone()); +impl ApplyToInternal for WithRange { + fn apply_to_path( + &self, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { + match self.as_ref() { + LitExpr::String(s) => (Some(JSON::String(s.clone().into())), vec![]), + LitExpr::Number(n) => (Some(JSON::Number(n.clone())), vec![]), + LitExpr::Bool(b) => (Some(JSON::Bool(*b)), vec![]), + LitExpr::Null => (Some(JSON::Null), vec![]), + LitExpr::Object(map) => { + let mut output = JSONMap::with_capacity(map.len()); + let mut errors = Vec::new(); + for (key, value) in map { + let (value_opt, apply_errors) = value.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + if let Some(value_json) = value_opt { + output.insert(key.as_str(), value_json); } } - output.insert(alias.name.clone(), JSON::Object(star_output)); + (Some(JSON::Object(output)), errors) } - // Aliased and subselected, e.g. "alias: * { hello }" - Some(StarSelection(Some(alias), Some(selection))) => { - let mut star_output = Map::new(); - for (key, value) in &data_map { - if !input_names.contains(key.as_str()) { - if let Some(selected) = - selection.apply_to_path(value, vars, input_path, errors) - { - star_output.insert(key.clone(), selected); - } - } + LitExpr::Array(vec) => { + let mut output = Vec::with_capacity(vec.len()); + let mut errors = Vec::new(); + for value in vec { + let (value_opt, apply_errors) = value.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + output.push(value_opt.unwrap_or(JSON::Null)); } - output.insert(alias.name.clone(), JSON::Object(star_output)); + (Some(JSON::Array(output)), errors) } - // Not aliased but subselected, e.g. 
"parent { * { hello } }" - Some(StarSelection(None, Some(selection))) => { - for (key, value) in &data_map { - if !input_names.contains(key.as_str()) { - if let Some(selected) = - selection.apply_to_path(value, vars, input_path, errors) - { - output.insert(key.clone(), selected); - } - } + LitExpr::Path(path) => path.apply_to_path(data, vars, input_path), + } + } + + fn compute_output_shape( + &self, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + match self.as_ref() { + LitExpr::Null => Shape::null(), + LitExpr::Bool(value) => Shape::bool_value(*value), + LitExpr::String(value) => Shape::string_value(value.as_str()), + + LitExpr::Number(value) => { + if let Some(n) = value.as_i64() { + Shape::int_value(n) + } else if value.is_f64() { + Shape::float() + } else { + Shape::error("Number neither Int nor Float") } } - // Neither aliased nor subselected, e.g. "parent { * }" or just "*" - Some(StarSelection(None, None)) => { - for (key, value) in &data_map { - if !input_names.contains(key.as_str()) { - output.insert(key.clone(), value.clone()); - } + + LitExpr::Object(map) => { + let mut fields = Shape::empty_map(); + for (key, value) in map { + fields.insert( + key.as_string(), + value.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ), + ); + } + Shape::object(fields, Shape::none()) + } + + LitExpr::Array(vec) => { + let mut shapes = Vec::with_capacity(vec.len()); + for value in vec { + shapes.push(value.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + )); } + Shape::array(shapes, Shape::none()) + } + + LitExpr::Path(path) => { + path.compute_output_shape(input_shape, dollar_shape, named_var_shapes) } - // No * selection present, e.g. 
"parent { just some properties }" - None => {} + } + } +} + +impl ApplyToInternal for SubSelection { + fn apply_to_path( + &self, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + ) -> (Option, Vec) { + if let JSON::Array(array) = data { + return self.apply_to_array(array, vars, input_path); + } + + let vars: VarsWithPathsMap = { + let mut vars = vars.clone(); + vars.insert(KnownVariable::Dollar, (data, input_path.clone())); + vars }; - if data_really_primitive && output.is_empty() { - return Some(data.clone()); + let mut output = JSON::Object(JSONMap::new()); + let mut errors = Vec::new(); + + for named_selection in self.selections.iter() { + let (named_output_opt, apply_errors) = + named_selection.apply_to_path(data, &vars, input_path); + errors.extend(apply_errors); + + let (merged, merge_errors) = json_merge(Some(&output), named_output_opt.as_ref()); + + errors.extend( + merge_errors + .into_iter() + .map(|message| ApplyToError::new(message, input_path.to_vec(), self.range())), + ); + + if let Some(merged) = merged { + output = merged; + } + } + + if !matches!(data, JSON::Object(_)) { + let output_is_empty = match &output { + JSON::Object(map) => map.is_empty(), + _ => false, + }; + if output_is_empty { + // If data was a primitive value (neither array nor object), and + // no output properties were generated, return data as is, along + // with any errors that occurred. + return (Some(data.clone()), errors); + } + } + + (Some(output), errors) + } + + fn compute_output_shape( + &self, + input_shape: Shape, + _previous_dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + // Just as SubSelection::apply_to_path calls apply_to_array when data is + // an array, so compute_output_shape recursively computes the output + // shapes of each array element shape. 
+ if let ShapeCase::Array { prefix, tail } = input_shape.case() { + let new_prefix = prefix + .iter() + .map(|shape| { + self.compute_output_shape(shape.clone(), shape.clone(), named_var_shapes) + }) + .collect::>(); + + let new_tail = if tail.is_none() { + tail.clone() + } else { + self.compute_output_shape(tail.clone(), tail.clone(), named_var_shapes) + }; + + return Shape::array(new_prefix, new_tail); + } + + // If the input shape is a named shape, it might end up being an array, + // so we need to hedge the output shape using a wildcard that maps over + // array elements. + let input_shape = input_shape.any_item(); + + // The SubSelection rebinds the $ variable to the selected input object, + // so we can ignore _previous_dollar_shape. + let dollar_shape = input_shape.clone(); + + // Build up the merged object shape using Shape::all to merge the + // individual named_selection object shapes. + let mut all_shape = Shape::empty_object(); + + for named_selection in self.selections.iter() { + // Simplifying as we go with Shape::all keeps all_shape relatively + // small in the common case when all named_selection items return an + // object shape, since those object shapes can all be merged + // together into one object. + all_shape = Shape::all([ + all_shape, + named_selection.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ), + ]); + + // If any named_selection item returns null instead of an object, + // that nullifies the whole object and allows shape computation to + // bail out early. 
+ if all_shape.is_null() { + break; + } } - Some(JSON::Object(output)) + all_shape } } @@ -488,10 +967,10 @@ mod tests { }), ); - check_ok(selection!(".nested.hello"), json!("world")); + check_ok(selection!("nested.hello"), json!("world")); check_ok(selection!("$.nested.hello"), json!("world")); - check_ok(selection!(".nested.world"), json!("hello")); + check_ok(selection!("nested.world"), json!("hello")); check_ok(selection!("$.nested.world"), json!("hello")); check_ok( @@ -528,7 +1007,7 @@ mod tests { ); check_ok( - selection!(".array { hello }"), + selection!("$.array { hello }"), json!([ { "hello": "world 0" }, { "hello": "world 1" }, @@ -537,7 +1016,7 @@ mod tests { ); check_ok( - selection!("worlds: .array.hello"), + selection!("worlds: array.hello"), json!({ "worlds": [ "world 0", @@ -559,7 +1038,7 @@ mod tests { ); check_ok( - selection!(".array.hello"), + selection!("array.hello"), json!(["world 0", "world 1", "world 2",]), ); @@ -569,7 +1048,7 @@ mod tests { ); check_ok( - selection!("nested grouped: { hello worlds: .array.hello }"), + selection!("nested grouped: { hello worlds: array.hello }"), json!({ "nested": { "hello": "world", @@ -606,290 +1085,138 @@ mod tests { } #[test] - fn test_apply_to_star_selections() { + fn test_apply_to_errors() { let data = json!({ - "englishAndGreekLetters": { - "a": { "en": "ay", "gr": "alpha" }, - "b": { "en": "bee", "gr": "beta" }, - "c": { "en": "see", "gr": "gamma" }, - "d": { "en": "dee", "gr": "delta" }, - "e": { "en": "ee", "gr": "epsilon" }, - "f": { "en": "eff", "gr": "phi" }, + "hello": "world", + "nested": { + "hello": 123, + "world": true, }, - "englishAndSpanishNumbers": [ - { "en": "one", "es": "uno" }, - { "en": "two", "es": "dos" }, - { "en": "three", "es": "tres" }, - { "en": "four", "es": "cuatro" }, - { "en": "five", "es": "cinco" }, - { "en": "six", "es": "seis" }, + "array": [ + { "hello": 1, "goodbye": "farewell" }, + { "hello": "two" }, + { "hello": 3.0, "smello": "yellow" }, ], - 
"asciiCharCodes": { - "A": 65, - "B": 66, - "C": 67, - "D": 68, - "E": 69, - "F": 70, - "G": 71, - }, - "books": { - "9780262533751": { - "title": "The Geometry of Meaning", - "author": "Peter Gärdenfors", - }, - "978-1492674313": { - "title": "P is for Pterodactyl: The Worst Alphabet Book Ever", - "author": "Raj Haldar", - }, - "9780262542456": { - "title": "A Biography of the Pixel", - "author": "Alvy Ray Smith", - }, - } }); - let check_ok = |selection: JSONSelection, expected_json: JSON| { - let (actual_json, errors) = selection.apply_to(&data); - assert_eq!(actual_json, Some(expected_json)); - assert_eq!(errors, vec![]); - }; + assert_eq!( + selection!("hello").apply_to(&data), + (Some(json!({"hello": "world"})), vec![],) + ); - check_ok( - selection!("englishAndGreekLetters { * { en }}"), - json!({ - "englishAndGreekLetters": { - "a": { "en": "ay" }, - "b": { "en": "bee" }, - "c": { "en": "see" }, - "d": { "en": "dee" }, - "e": { "en": "ee" }, - "f": { "en": "eff" }, - }, - }), + fn make_yellow_errors_expected(yellow_range: std::ops::Range) -> Vec { + vec![ApplyToError::new( + "Property .yellow not found in object".to_string(), + vec![json!("yellow")], + Some(yellow_range), + )] + } + assert_eq!( + selection!("yellow").apply_to(&data), + (Some(json!({})), make_yellow_errors_expected(0..6)), + ); + assert_eq!( + selection!("$.yellow").apply_to(&data), + (None, make_yellow_errors_expected(2..8)), ); - check_ok( - selection!("englishAndGreekLetters { C: .c.en * { gr }}"), - json!({ - "englishAndGreekLetters": { - "a": { "gr": "alpha" }, - "b": { "gr": "beta" }, - "C": "see", - "d": { "gr": "delta" }, - "e": { "gr": "epsilon" }, - "f": { "gr": "phi" }, - }, - }), - ); - - check_ok( - selection!("englishAndGreekLetters { A: a B: b rest: * }"), - json!({ - "englishAndGreekLetters": { - "A": { "en": "ay", "gr": "alpha" }, - "B": { "en": "bee", "gr": "beta" }, - "rest": { - "c": { "en": "see", "gr": "gamma" }, - "d": { "en": "dee", "gr": "delta" }, - "e": { "en": 
"ee", "gr": "epsilon" }, - "f": { "en": "eff", "gr": "phi" }, - }, - }, - }), - ); - - check_ok( - selection!(".'englishAndSpanishNumbers' { en rest: * }"), - json!([ - { "en": "one", "rest": { "es": "uno" } }, - { "en": "two", "rest": { "es": "dos" } }, - { "en": "three", "rest": { "es": "tres" } }, - { "en": "four", "rest": { "es": "cuatro" } }, - { "en": "five", "rest": { "es": "cinco" } }, - { "en": "six", "rest": { "es": "seis" } }, - ]), - ); - - // To include/preserve all remaining properties from an object in the output - // object, we support a naked * selection (no alias or subselection). This - // is useful when the values of the properties are scalar, so a subselection - // isn't possible, and we want to preserve all properties of the original - // object. These unnamed properties may not be useful for GraphQL unless the - // whole object is considered as opaque JSON scalar data, but we still need - // to support preserving JSON when it has scalar properties. - check_ok( - selection!("asciiCharCodes { ay: A bee: B * }"), - json!({ - "asciiCharCodes": { - "ay": 65, - "bee": 66, - "C": 67, - "D": 68, - "E": 69, - "F": 70, - "G": 71, - }, - }), - ); - - check_ok( - selection!("asciiCharCodes { * } gee: .asciiCharCodes.G"), - json!({ - "asciiCharCodes": data.get("asciiCharCodes").unwrap(), - "gee": 71, - }), - ); - - check_ok( - selection!("books { * { title } }"), - json!({ - "books": { - "9780262533751": { - "title": "The Geometry of Meaning", - }, - "978-1492674313": { - "title": "P is for Pterodactyl: The Worst Alphabet Book Ever", - }, - "9780262542456": { - "title": "A Biography of the Pixel", - }, - }, - }), - ); - - check_ok( - selection!("books { authorsByISBN: * { author } }"), - json!({ - "books": { - "authorsByISBN": { - "9780262533751": { - "author": "Peter Gärdenfors", - }, - "978-1492674313": { - "author": "Raj Haldar", - }, - "9780262542456": { - "author": "Alvy Ray Smith", - }, - }, - }, - }), - ); - } - - #[test] - fn 
test_apply_to_errors() { - let data = json!({ - "hello": "world", - "nested": { - "hello": 123, - "world": true, - }, - "array": [ - { "hello": 1, "goodbye": "farewell" }, - { "hello": "two" }, - { "hello": 3.0, "smello": "yellow" }, - ], - }); - - assert_eq!( - selection!("hello").apply_to(&data), - (Some(json!({"hello": "world"})), vec![],) - ); - - let yellow_errors_expected = vec![ApplyToError::from_json(&json!({ - "message": "Property .yellow not found in object", - "path": ["yellow"], - }))]; - assert_eq!( - selection!("yellow").apply_to(&data), - (Some(json!({})), yellow_errors_expected.clone()) - ); - assert_eq!( - selection!(".yellow").apply_to(&data), - (None, yellow_errors_expected.clone()) - ); - assert_eq!( - selection!("$.yellow").apply_to(&data), - (None, yellow_errors_expected.clone()) + assert_eq!( + selection!("nested.hello").apply_to(&data), + (Some(json!(123)), vec![],) ); + fn make_quoted_yellow_expected( + yellow_range: std::ops::Range, + ) -> (Option, Vec) { + ( + None, + vec![ApplyToError::new( + "Property .\"yellow\" not found in object".to_string(), + vec![json!("nested"), json!("yellow")], + Some(yellow_range), + )], + ) + } assert_eq!( - selection!(".nested.hello").apply_to(&data), - (Some(json!(123)), vec![],) - ); - - let quoted_yellow_expected = ( - None, - vec![ApplyToError::from_json(&json!({ - "message": "Property .\"yellow\" not found in object", - "path": ["nested", "yellow"], - }))], + selection!("nested.'yellow'").apply_to(&data), + make_quoted_yellow_expected(7..15), ); assert_eq!( - selection!(".nested.'yellow'").apply_to(&data), - quoted_yellow_expected, + selection!("nested.\"yellow\"").apply_to(&data), + make_quoted_yellow_expected(7..15), ); assert_eq!( selection!("$.nested.'yellow'").apply_to(&data), - quoted_yellow_expected, + make_quoted_yellow_expected(9..17), ); - let nested_path_expected = ( - Some(json!({ - "world": true, - })), - vec![ - ApplyToError::from_json(&json!({ - "message": "Property .hola not found in 
object", - "path": ["nested", "hola"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .yellow not found in object", - "path": ["nested", "yellow"], + fn make_nested_path_expected( + hola_range: (usize, usize), + yellow_range: (usize, usize), + ) -> (Option, Vec) { + ( + Some(json!({ + "world": true, })), - ], - ); + vec![ + ApplyToError::from_json(&json!({ + "message": "Property .hola not found in object", + "path": ["nested", "hola"], + "range": hola_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .yellow not found in object", + "path": ["nested", "yellow"], + "range": yellow_range, + })), + ], + ) + } assert_eq!( - selection!(".nested { hola yellow world }").apply_to(&data), - nested_path_expected, + selection!("$.nested { hola yellow world }").apply_to(&data), + make_nested_path_expected((11, 15), (16, 22)), ); assert_eq!( - selection!("$.nested { hola yellow world }").apply_to(&data), - nested_path_expected, + selection!(" $ . nested { hola yellow world } ").apply_to(&data), + make_nested_path_expected((14, 18), (19, 25)), ); - let partial_array_expected = ( - Some(json!({ - "partial": [ - { "hello": 1, "goodbye": "farewell" }, - { "hello": "two" }, - { "hello": 3.0 }, - ], - })), - vec![ - ApplyToError::from_json(&json!({ - "message": "Property .goodbye not found in object", - "path": ["array", 1, "goodbye"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .goodbye not found in object", - "path": ["array", 2, "goodbye"], + fn make_partial_array_expected( + goodbye_range: (usize, usize), + ) -> (Option, Vec) { + ( + Some(json!({ + "partial": [ + { "hello": 1, "goodbye": "farewell" }, + { "hello": "two" }, + { "hello": 3.0 }, + ], })), - ], - ); + vec![ + ApplyToError::from_json(&json!({ + "message": "Property .goodbye not found in object", + "path": ["array", 1, "goodbye"], + "range": goodbye_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .goodbye not found in object", + "path": 
["array", 2, "goodbye"], + "range": goodbye_range, + })), + ], + ) + } assert_eq!( - selection!("partial: .array { hello goodbye }").apply_to(&data), - partial_array_expected, + selection!("partial: $.array { hello goodbye }").apply_to(&data), + make_partial_array_expected((25, 32)), ); assert_eq!( - selection!("partial: $.array { hello goodbye }").apply_to(&data), - partial_array_expected, + selection!(" partial : $ . array { hello goodbye } ").apply_to(&data), + make_partial_array_expected((29, 36)), ); assert_eq!( - selection!("good: .array.hello bad: .array.smello").apply_to(&data), + selection!("good: array.hello bad: array.smello").apply_to(&data), ( Some(json!({ "good": [ @@ -907,10 +1234,12 @@ mod tests { ApplyToError::from_json(&json!({ "message": "Property .smello not found in object", "path": ["array", 0, "smello"], + "range": [29, 35], })), ApplyToError::from_json(&json!({ "message": "Property .smello not found in object", "path": ["array", 1, "smello"], + "range": [29, 35], })), ], ) @@ -930,17 +1259,19 @@ mod tests { ApplyToError::from_json(&json!({ "message": "Property .smello not found in object", "path": ["array", 0, "smello"], + "range": [14, 20], })), ApplyToError::from_json(&json!({ "message": "Property .smello not found in object", "path": ["array", 1, "smello"], + "range": [14, 20], })), ], ) ); assert_eq!( - selection!(".nested { grouped: { hello smelly world } }").apply_to(&data), + selection!("$.nested { grouped: { hello smelly world } }").apply_to(&data), ( Some(json!({ "grouped": { @@ -951,12 +1282,13 @@ mod tests { vec![ApplyToError::from_json(&json!({ "message": "Property .smelly not found in object", "path": ["nested", "smelly"], + "range": [28, 34], })),], ) ); assert_eq!( - selection!("alias: .nested { grouped: { hello smelly world } }").apply_to(&data), + selection!("alias: $.nested { grouped: { hello smelly world } }").apply_to(&data), ( Some(json!({ "alias": { @@ -969,7 +1301,8 @@ mod tests { vec![ApplyToError::from_json(&json!({ 
"message": "Property .smelly not found in object", "path": ["nested", "smelly"], - })),], + "range": [35, 41], + }))], ) ); } @@ -1001,58 +1334,71 @@ mod tests { ], }); - let array_of_arrays_x_expected = ( - Some(json!([[0], [1, 1, 1], [2, 2], [], [null, 4, 4, null, 4],])), - vec![ - ApplyToError::from_json(&json!({ - "message": "Property .x not found in null", - "path": ["arrayOfArrays", 4, 0, "x"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .x not found in null", - "path": ["arrayOfArrays", 4, 3, "x"], - })), - ], - ); + fn make_array_of_arrays_x_expected( + x_range: (usize, usize), + ) -> (Option, Vec) { + ( + Some(json!([[0], [1, 1, 1], [2, 2], [], [null, 4, 4, null, 4]])), + vec![ + ApplyToError::from_json(&json!({ + "message": "Property .x not found in null", + "path": ["arrayOfArrays", 4, 0, "x"], + "range": x_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .x not found in null", + "path": ["arrayOfArrays", 4, 3, "x"], + "range": x_range, + })), + ], + ) + } assert_eq!( - selection!(".arrayOfArrays.x").apply_to(&data), - array_of_arrays_x_expected, + selection!("arrayOfArrays.x").apply_to(&data), + make_array_of_arrays_x_expected((14, 15)), ); assert_eq!( selection!("$.arrayOfArrays.x").apply_to(&data), - array_of_arrays_x_expected, + make_array_of_arrays_x_expected((16, 17)), ); - let array_of_arrays_y_expected = ( - Some(json!([ - [0], - [0, 1, 2], - [0, 1], - [], - [null, 1, null, null, 4], - ])), - vec![ - ApplyToError::from_json(&json!({ - "message": "Property .y not found in null", - "path": ["arrayOfArrays", 4, 0, "y"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .y not found in object", - "path": ["arrayOfArrays", 4, 2, "y"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .y not found in null", - "path": ["arrayOfArrays", 4, 3, "y"], - })), - ], - ); + fn make_array_of_arrays_y_expected( + y_range: (usize, usize), + ) -> (Option, Vec) { + ( + Some(json!([ + [0], + 
[0, 1, 2], + [0, 1], + [], + [null, 1, null, null, 4], + ])), + vec![ + ApplyToError::from_json(&json!({ + "message": "Property .y not found in null", + "path": ["arrayOfArrays", 4, 0, "y"], + "range": y_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .y not found in object", + "path": ["arrayOfArrays", 4, 2, "y"], + "range": y_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .y not found in null", + "path": ["arrayOfArrays", 4, 3, "y"], + "range": y_range, + })), + ], + ) + } assert_eq!( - selection!(".arrayOfArrays.y").apply_to(&data), - array_of_arrays_y_expected + selection!("arrayOfArrays.y").apply_to(&data), + make_array_of_arrays_y_expected((14, 15)), ); assert_eq!( selection!("$.arrayOfArrays.y").apply_to(&data), - array_of_arrays_y_expected + make_array_of_arrays_y_expected((16, 17)), ); assert_eq!( @@ -1086,76 +1432,91 @@ mod tests { ApplyToError::from_json(&json!({ "message": "Property .x not found in null", "path": ["arrayOfArrays", 4, 0, "x"], + "range": [23, 24], })), ApplyToError::from_json(&json!({ "message": "Property .y not found in null", "path": ["arrayOfArrays", 4, 0, "y"], + "range": [25, 26], })), ApplyToError::from_json(&json!({ "message": "Property .y not found in object", "path": ["arrayOfArrays", 4, 2, "y"], + "range": [25, 26], })), ApplyToError::from_json(&json!({ "message": "Property .x not found in null", "path": ["arrayOfArrays", 4, 3, "x"], + "range": [23, 24], })), ApplyToError::from_json(&json!({ "message": "Property .y not found in null", "path": ["arrayOfArrays", 4, 3, "y"], + "range": [25, 26], })), ], ), ); - let array_of_arrays_x_y_expected = ( - Some(json!({ - "ys": [ - [0], - [0, 1, 2], - [0, 1], - [], - [null, 1, null, null, 4], - ], - "xs": [ - [0], - [1, 1, 1], - [2, 2], - [], - [null, 4, 4, null, 4], - ], - })), - vec![ - ApplyToError::from_json(&json!({ - "message": "Property .y not found in null", - "path": ["arrayOfArrays", 4, 0, "y"], - })), - 
ApplyToError::from_json(&json!({ - "message": "Property .y not found in object", - "path": ["arrayOfArrays", 4, 2, "y"], - })), - ApplyToError::from_json(&json!({ - // Reversing the order of "path" and "message" here to make - // sure that doesn't affect the deduplication logic. - "path": ["arrayOfArrays", 4, 3, "y"], - "message": "Property .y not found in null", - })), - ApplyToError::from_json(&json!({ - "message": "Property .x not found in null", - "path": ["arrayOfArrays", 4, 0, "x"], - })), - ApplyToError::from_json(&json!({ - "message": "Property .x not found in null", - "path": ["arrayOfArrays", 4, 3, "x"], + fn make_array_of_arrays_x_y_expected( + x_range: (usize, usize), + y_range: (usize, usize), + ) -> (Option, Vec) { + ( + Some(json!({ + "ys": [ + [0], + [0, 1, 2], + [0, 1], + [], + [null, 1, null, null, 4], + ], + "xs": [ + [0], + [1, 1, 1], + [2, 2], + [], + [null, 4, 4, null, 4], + ], })), - ], - ); + vec![ + ApplyToError::from_json(&json!({ + "message": "Property .y not found in null", + "path": ["arrayOfArrays", 4, 0, "y"], + "range": y_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .y not found in object", + "path": ["arrayOfArrays", 4, 2, "y"], + "range": y_range, + })), + ApplyToError::from_json(&json!({ + // Reversing the order of "path" and "message" here to make + // sure that doesn't affect the deduplication logic. 
+ "path": ["arrayOfArrays", 4, 3, "y"], + "message": "Property .y not found in null", + "range": y_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .x not found in null", + "path": ["arrayOfArrays", 4, 0, "x"], + "range": x_range, + })), + ApplyToError::from_json(&json!({ + "message": "Property .x not found in null", + "path": ["arrayOfArrays", 4, 3, "x"], + "range": x_range, + })), + ], + ) + } assert_eq!( - selection!("ys: .arrayOfArrays.y xs: .arrayOfArrays.x").apply_to(&data), - array_of_arrays_x_y_expected, + selection!("ys: arrayOfArrays.y xs: arrayOfArrays.x").apply_to(&data), + make_array_of_arrays_x_y_expected((38, 39), (18, 19)), ); assert_eq!( selection!("ys: $.arrayOfArrays.y xs: $.arrayOfArrays.x").apply_to(&data), - array_of_arrays_x_y_expected, + make_array_of_arrays_x_y_expected((42, 43), (20, 21)), ); } @@ -1199,14 +1560,19 @@ mod tests { ), ); assert_eq!( - selection!("id: $args.id name").apply_to(&data), + selection!("nested.path { id: $args.id name }").apply_to(&json!({ + "nested": { + "path": data.clone(), + }, + })), ( Some(json!({ "name": "Ben" })), vec![ApplyToError::from_json(&json!({ "message": "Variable $args not found", - "path": ["$args"], + "path": ["nested", "path"], + "range": [18, 23], }))], ), ); @@ -1221,9 +1587,811 @@ mod tests { vec![ApplyToError::from_json(&json!({ "message": "Property .id not found in object", "path": ["$args", "id"], + "range": [10, 12], }))], ), ); + + // A single variable path should not be mapped over an input array. 
+ assert_eq!( + selection!("$args.id").apply_with_vars(&json!([1, 2, 3]), &vars), + (Some(json!("id from args")), vec![]), + ); + } + + #[test] + fn test_apply_to_variable_expressions_typename() { + let typename_object = + selection!("__typename: $->echo('Product') reviews { __typename: $->echo('Review') }") + .apply_to(&json!({"reviews": [{}]})); + assert_eq!( + typename_object, + ( + Some(json!({"__typename": "Product", "reviews": [{ "__typename": "Review" }] })), + vec![] + ) + ); + } + + #[test] + fn test_literal_expressions_in_parentheses() { + assert_eq!( + selection!("__typename: $('Product')").apply_to(&json!({})), + (Some(json!({"__typename": "Product"})), vec![]), + ); + + assert_eq!( + selection!(" __typename : 'Product' ").apply_to(&json!({})), + ( + Some(json!({})), + vec![ApplyToError::new( + "Property .\"Product\" not found in object".to_string(), + vec![json!("Product")], + Some(14..23), + )], + ), + ); + + assert_eq!( + selection!( + r#" + one: $(1) + two: $(2) + negativeThree: $(- 3) + true: $(true ) + false: $( false) + null: $(null) + string: $("string") + array: $( [ 1 , 2 , 3 ] ) + object: $( { "key" : "value" } ) + path: $(nested.path) + "# + ) + .apply_to(&json!({ + "nested": { + "path": "nested path value" + } + })), + ( + Some(json!({ + "one": 1, + "two": 2, + "negativeThree": -3, + "true": true, + "false": false, + "null": null, + "string": "string", + "array": [1, 2, 3], + "object": { "key": "value" }, + "path": "nested path value", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + one: $(1)->typeof + two: $(2)->typeof + negativeThree: $(-3)->typeof + true: $(true)->typeof + false: $(false)->typeof + null: $(null)->typeof + string: $("string")->typeof + array: $([1, 2, 3])->typeof + object: $({ "key": "value" })->typeof + path: $(nested.path)->typeof + "# + ) + .apply_to(&json!({ + "nested": { + "path": 12345 + } + })), + ( + Some(json!({ + "one": "number", + "two": "number", + "negativeThree": "number", + "true": 
"boolean", + "false": "boolean", + "null": "null", + "string": "string", + "array": "array", + "object": "object", + "path": "number", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + items: $([ + 1, + -2.0, + true, + false, + null, + "string", + [1, 2, 3], + { "key": "value" }, + nested.path, + ])->map(@->typeof) + "# + ) + .apply_to(&json!({ + "nested": { + "path": { "deeply": "nested" } + } + })), + ( + Some(json!({ + "items": [ + "number", + "number", + "boolean", + "boolean", + "null", + "string", + "array", + "object", + "object", + ], + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + $({ + one: 1, + two: 2, + negativeThree: -3, + true: true, + false: false, + null: null, + string: "string", + array: [1, 2, 3], + object: { "key": "value" }, + path: $ . nested . path , + })->entries + "# + ) + .apply_to(&json!({ + "nested": { + "path": "nested path value" + } + })), + ( + Some(json!([ + { "key": "one", "value": 1 }, + { "key": "two", "value": 2 }, + { "key": "negativeThree", "value": -3 }, + { "key": "true", "value": true }, + { "key": "false", "value": false }, + { "key": "null", "value": null }, + { "key": "string", "value": "string" }, + { "key": "array", "value": [1, 2, 3] }, + { "key": "object", "value": { "key": "value" } }, + { "key": "path", "value": "nested path value" }, + ])), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + $({ + string: $("string")->slice(1, 4), + array: $([1, 2, 3])->map(@->add(10)), + object: $({ "key": "value" })->get("key"), + path: nested.path->slice($("nested ")->size), + needlessParens: $("oyez"), + withoutParens: "oyez", + }) + "# + ) + .apply_to(&json!({ + "nested": { + "path": "nested path value" + } + })), + ( + Some(json!({ + "string": "tri", + "array": [11, 12, 13], + "object": "value", + "path": "path value", + "needlessParens": "oyez", + "withoutParens": "oyez", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + string: $("string")->slice(1, 4) + array: $([1, 2, 
3])->map(@->add(10)) + object: $({ "key": "value" })->get("key") + path: nested.path->slice($("nested ")->size) + "# + ) + .apply_to(&json!({ + "nested": { + "path": "nested path value" + } + })), + ( + Some(json!({ + "string": "tri", + "array": [11, 12, 13], + "object": "value", + "path": "path value", + })), + vec![], + ), + ); + } + + #[test] + fn test_inline_paths_with_subselections() { + let data = json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + "choices": [{ + "index": 0, + "message": { + "role": "assistant", + "content": "The capital of Australia is Canberra.", + }, + }, { + "index": 1, + "message": { + "role": "assistant", + "content": "The capital of Australia is Sydney.", + }, + }], + }); + + { + let expected = ( + Some(json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + "role": "assistant", + "content": "The capital of Australia is Canberra.", + })), + vec![], + ); + + assert_eq!( + selection!( + r#" + id + created + model + role: choices->first.message.role + content: choices->first.message.content + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->first.message { + role + content + } + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + choices->first.message { + role + content + } + created + model + "# + ) + .apply_to(&data), + expected.clone(), + ); + } + + { + let expected = ( + Some(json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + "role": "assistant", + "message": "The capital of Australia is Sydney.", + })), + vec![], + ); + + assert_eq!( + selection!( + r#" + id + created + model + role: choices->last.message.role + message: choices->last.message.content + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->last.message { + role + message: content + } + "# + ) + 
.apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + created + choices->last.message { + message: content + role + } + model + id + "# + ) + .apply_to(&data), + expected.clone(), + ); + } + + { + let expected = ( + Some(json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + "role": "assistant", + "correct": "The capital of Australia is Canberra.", + "incorrect": "The capital of Australia is Sydney.", + })), + vec![], + ); + + assert_eq!( + selection!( + r#" + id + created + model + role: choices->first.message.role + correct: choices->first.message.content + incorrect: choices->last.message.content + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->first.message { + role + correct: content + } + choices->last.message { + incorrect: content + } + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->first.message { + role + correct: content + } + incorrect: choices->last.message.content + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->first.message { + correct: content + } + choices->last.message { + role + incorrect: content + } + "# + ) + .apply_to(&data), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id + created + correct: choices->first.message.content + choices->last.message { + role + incorrect: content + } + model + "# + ) + .apply_to(&data), + expected.clone(), + ); + } + + { + let data = json!({ + "from": "data", + }); + + let vars = { + let mut vars = IndexMap::default(); + vars.insert( + "$this".to_string(), + json!({ + "id": 1234, + }), + ); + vars.insert( + "$args".to_string(), + json!({ + "input": { + "title": "The capital of Australia", + "body": "Canberra", + }, + "extra": "extra", + }), + ); + vars + }; + + let expected = ( + Some(json!({ + "id": 1234, + "title": 
"The capital of Australia", + "body": "Canberra", + "from": "data", + })), + vec![], + ); + + assert_eq!( + selection!( + r#" + id: $this.id + $args.input { + title + body + } + from + "# + ) + .apply_with_vars(&data, &vars), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + from + $args.input { title body } + id: $this.id + "# + ) + .apply_with_vars(&data, &vars), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + $args.input { body title } + from + id: $this.id + "# + ) + .apply_with_vars(&data, &vars), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id: $this.id + $args { $.input { title body } } + from + "# + ) + .apply_with_vars(&data, &vars), + expected.clone(), + ); + + assert_eq!( + selection!( + r#" + id: $this.id + $args { $.input { title body } extra } + from: $.from + "# + ) + .apply_with_vars(&data, &vars), + ( + Some(json!({ + "id": 1234, + "title": "The capital of Australia", + "body": "Canberra", + "extra": "extra", + "from": "data", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + # Equivalent to id: $this.id + $this { id } + + $args { + __typename: $("Args") + + # Requiring $. instead of just . prevents .input from + # parsing as a key applied to the $("Args") string. 
+ $.input { title body } + + extra + } + + from: $.from + "# + ) + .apply_with_vars(&data, &vars), + ( + Some(json!({ + "id": 1234, + "title": "The capital of Australia", + "body": "Canberra", + "__typename": "Args", + "extra": "extra", + "from": "data", + })), + vec![], + ), + ); + } + } + + #[test] + fn test_inline_path_errors() { + { + let data = json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + "choices": [{ + "message": "The capital of Australia is Canberra.", + }, { + "message": "The capital of Australia is Sydney.", + }], + }); + + let expected = ( + Some(json!({ + "id": 123, + "created": "2021-01-01T00:00:00Z", + "model": "gpt-4o", + })), + vec![ + ApplyToError::new( + "Property .role not found in string".to_string(), + vec![ + json!("choices"), + json!("->first"), + json!("message"), + json!("role"), + ], + Some(123..127), + ), + ApplyToError::new( + "Property .content not found in string".to_string(), + vec![ + json!("choices"), + json!("->first"), + json!("message"), + json!("content"), + ], + Some(128..135), + ), + ApplyToError::new( + "Expected object or null, not string".to_string(), + vec![], + // This is the range of the whole + // `choices->first.message { role content }` + // subselection. + Some(98..137), + ), + ], + ); + + assert_eq!( + selection!( + r#" + id + created + model + choices->first.message { role content } + "# + ) + .apply_to(&data), + expected.clone(), + ); + } + + assert_eq!( + selection!("id nested.path.nonexistent { name }").apply_to(&json!({ + "id": 2345, + "nested": { + "path": "nested path value", + }, + })), + ( + Some(json!({ + "id": 2345, + })), + vec![ + ApplyToError::new( + "Property .nonexistent not found in string".to_string(), + vec![json!("nested"), json!("path"), json!("nonexistent")], + Some(15..26), + ), + ApplyToError::new( + "Expected object or null, not nothing".to_string(), + vec![], + // This is the range of the whole + // `nested.path.nonexistent { name }` path selection. 
+ Some(3..35), + ), + ], + ), + ); + + // We have to construct this invalid selection manually because we want + // to test an error case requiring a PathWithSubSelection that does not + // actually have a SubSelection, which should not be possible to + // construct through normal parsing. + let invalid_inline_path_selection = JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Path { + alias: None, + inline: false, + path: PathSelection { + path: PathList::Key( + Key::field("some").into_with_range(), + PathList::Key( + Key::field("number").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + }], + ..Default::default() + }); + + assert_eq!( + invalid_inline_path_selection.apply_to(&json!({ + "some": { + "number": 579, + }, + })), + ( + Some(json!({})), + vec![ApplyToError::new( + "Named path must have an alias, a trailing subselection, or be inlined with ... and produce an object or null".to_string(), + vec![], + // No range because this is a manually constructed selection. + None, + ),], + ), + ); + + let valid_inline_path_selection = JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Path { + alias: None, + inline: true, // This makes it valid. 
+ path: PathSelection { + path: PathList::Key( + Key::field("some").into_with_range(), + PathList::Key( + Key::field("object").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + }], + ..Default::default() + }); + + assert_eq!( + valid_inline_path_selection.apply_to(&json!({ + "some": { + "object": { + "key": "value", + }, + }, + })), + ( + Some(json!({ + "key": "value", + })), + vec![], + ), + ); } #[test] @@ -1261,12 +2429,17 @@ mod tests { ); assert_eq!( - selection!(".'not an identifier'.'also.not.an.identifier'").apply_to(&data), + selection!("'not an identifier'.'also.not.an.identifier'").apply_to(&data), + (Some(json!([0, 1, 2])), vec![],), + ); + + assert_eq!( + selection!("$.'not an identifier'.'also.not.an.identifier'").apply_to(&data), (Some(json!([0, 1, 2])), vec![],), ); assert_eq!( - selection!(".\"not an identifier\" { safe: \"also.not.an.identifier\" }") + selection!("$.\"not an identifier\" { safe: \"also.not.an.identifier\" }") .apply_to(&data), ( Some(json!([ @@ -1302,13 +2475,109 @@ mod tests { ); assert_eq!( - selection!(".another.'pesky string literal!'.'{ evil braces }'").apply_to(&data), + selection!("another.'pesky string literal!'.'{ evil braces }'").apply_to(&data), (Some(json!(true)), vec![],), ); assert_eq!( - selection!(".another.'pesky string literal!'.\"identifier\"").apply_to(&data), + selection!("another.'pesky string literal!'.\"identifier\"").apply_to(&data), + (Some(json!(123)), vec![],), + ); + + assert_eq!( + selection!("$.another.'pesky string literal!'.\"identifier\"").apply_to(&data), (Some(json!(123)), vec![],), ); } + + #[test] + fn test_compute_output_shape() { + assert_eq!(selection!("").shape().pretty_print(), "{}"); + + assert_eq!( + selection!("id name").shape().pretty_print(), + "{ id: $root.*.id, name: $root.*.name }", + ); + + // // On hold until variadic $(...) is merged (PR #6456). 
+ // assert_eq!( + // selection!("$.data { thisOrThat: $(maybe.this, maybe.that) }") + // .shape() + // .pretty_print(), + // // Technically $.data could be an array, so this should be a union + // // of this shape and a list of this shape, except with + // // $root.data.0.maybe.{this,that} shape references. + // // + // // We could try to say that any { ... } shape represents either an + // // object or a list of objects, by policy, to avoid having to write + // // One<{...}, List<{...}>> everywhere a SubSelection appears. + // // + // // But then we don't know where the array indexes should go... + // "{ thisOrThat: One<$root.data.*.maybe.this, $root.data.*.maybe.that> }", + // ); + + assert_eq!( + selection!(r#" + id + name + friends: friend_ids { id: @ } + alias: arrayOfArrays { x y } + ys: arrayOfArrays.y xs: arrayOfArrays.x + "#).shape().pretty_print(), + + // This output shape is wrong if $root.friend_ids turns out to be an + // array, and it's tricky to see how to transform the shape to what + // it would have been if we knew that, where friends: List<{ id: + // $root.friend_ids.* }> (note the * meaning any array index), + // because who's to say it's not the id field that should become the + // List, rather than the friends field? 
+ "{ alias: { x: $root.*.arrayOfArrays.*.x, y: $root.*.arrayOfArrays.*.y }, friends: { id: $root.*.friend_ids.* }, id: $root.*.id, name: $root.*.name, xs: $root.*.arrayOfArrays.x, ys: $root.*.arrayOfArrays.y }", + ); + + assert_eq!( + selection!(r#" + id + name + friends: friend_ids->map({ id: @ }) + alias: arrayOfArrays { x y } + ys: arrayOfArrays.y xs: arrayOfArrays.x + "#).shape().pretty_print(), + "{ alias: { x: $root.*.arrayOfArrays.*.x, y: $root.*.arrayOfArrays.*.y }, friends: Unknown, id: $root.*.id, name: $root.*.name, xs: $root.*.arrayOfArrays.x, ys: $root.*.arrayOfArrays.y }", + ); + + assert_eq!( + selection!("$->echo({ thrice: [@, @, @] })") + .shape() + .pretty_print(), + "{ thrice: [$root, $root, $root] }", + ); + + assert_eq!( + selection!("$->echo({ thrice: [@, @, @] })->entries") + .shape() + .pretty_print(), + "[{ key: \"thrice\", value: [$root, $root, $root] }]", + ); + + assert_eq!( + selection!("$->echo({ thrice: [@, @, @] })->entries.key") + .shape() + .pretty_print(), + "[\"thrice\"]", + ); + + assert_eq!( + selection!("$->echo({ thrice: [@, @, @] })->entries.value") + .shape() + .pretty_print(), + "[[$root, $root, $root]]", + ); + + assert_eq!( + selection!("$->echo({ wrapped: @ })->entries { k: key v: value }") + .shape() + .pretty_print(), + "[{ k: \"wrapped\", v: $root }]", + ); + } } diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Alias.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Alias.svg index 5c2a8db39b..386cb90fdd 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/Alias.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Alias.svg @@ -1,53 +1,53 @@ - + - - - Identifier + xlink:href="#Key" + xlink:title="Key"> + + + Key - - + - : + : - - + d="m17 17 h2 m0 0 h10 m42 0 h10 m0 0 h10 m24 0 h10 m3 0 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/AtPath.svg 
b/apollo-federation/src/sources/connect/json_selection/grammar/AtPath.svg new file mode 100644 index 0000000000..f9e37d05d9 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/AtPath.svg @@ -0,0 +1,53 @@ + + + + + + + + + + @ + + + + PathStep + + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Comment.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Comment.svg index a28134cc17..ff67c91647 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/Comment.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Comment.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/ExprPath.svg b/apollo-federation/src/sources/connect/json_selection/grammar/ExprPath.svg new file mode 100644 index 0000000000..f1936b302c --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/ExprPath.svg @@ -0,0 +1,68 @@ + + + + + + + + + + $( + + + + LitExpr + + + + ) + + + + PathStep + + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Identifier.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Identifier.svg index 03a7bb0abf..2e096eb115 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/Identifier.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Identifier.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSArray.svg b/apollo-federation/src/sources/connect/json_selection/grammar/JSArray.svg deleted file mode 100644 index cd7d51ed75..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSArray.svg +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - [ - - - - JSLiteral - - - - , - - - ] - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSNumber.svg b/apollo-federation/src/sources/connect/json_selection/grammar/JSNumber.svg 
deleted file mode 100644 index 413b1fe835..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSNumber.svg +++ /dev/null @@ -1,75 +0,0 @@ - - - - - - - - - - - - - - - UnsignedInt - - - - . - - - [0-9] - - - . - - - [0-9] - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSONSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/JSONSelection.svg index c828cdaf35..d12580f0e9 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSONSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/JSONSelection.svg @@ -1,52 +1,52 @@ - + - - + + - - - NakedSubSelection + xlink:href="#NamedSelection" + xlink:title="NamedSelection"> + + + NamedSelection - - - PathSelection + + + PathSelection - - + d="m17 51 h2 m40 0 h10 m0 0 h132 m-162 0 l20 0 m-1 0 q-9 0 -9 -10 l0 -14 q0 -10 10 -10 m142 34 l20 0 m-20 0 q10 0 10 -10 l0 -14 q0 -10 -10 -10 m-142 0 h10 m122 0 h10 m-182 34 h20 m182 0 h20 m-222 0 q10 0 10 10 m202 0 q0 -10 10 -10 m-212 10 v12 m202 0 v-12 m-202 12 q0 10 10 10 m182 0 q10 0 10 -10 m-192 10 h10 m106 0 h10 m0 0 h56 m23 -32 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSObject.svg b/apollo-federation/src/sources/connect/json_selection/grammar/JSObject.svg deleted file mode 100644 index d305abdacd..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSObject.svg +++ /dev/null @@ -1,69 +0,0 @@ - - - - - - - - - - { - - - - JSProperty - - - - , - - - } - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSPrimitive.svg b/apollo-federation/src/sources/connect/json_selection/grammar/JSPrimitive.svg deleted file mode 100644 index 1b3d452739..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSPrimitive.svg +++ /dev/null @@ -1,76 +0,0 @@ - - - - - - - - - - - StringLiteral - - - - - JSNumber - - - - true - - - false - - - null - - - - 
diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Key.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Key.svg index a41054011a..bd9802b224 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/Key.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Key.svg @@ -1,32 +1,32 @@ - + @@ -39,14 +39,14 @@ Identifier - - - StringLiteral + xlink:href="#LitString" + xlink:title="LitString"> + + + LitString - - + d="m17 17 h2 m20 0 h10 m78 0 h10 m-118 0 h20 m98 0 h20 m-138 0 q10 0 10 10 m118 0 q0 -10 10 -10 m-128 10 v24 m118 0 v-24 m-118 24 q0 10 10 10 m98 0 q10 0 10 -10 m-108 10 h10 m72 0 h10 m0 0 h6 m23 -44 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/KeyPath.svg b/apollo-federation/src/sources/connect/json_selection/grammar/KeyPath.svg index 191de27c46..df43495053 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/KeyPath.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/KeyPath.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/LitArray.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitArray.svg new file mode 100644 index 0000000000..391e2907ff --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitArray.svg @@ -0,0 +1,77 @@ + + + + + + + + + + [ + + + + LitExpr + + + + , + + + , + + + ] + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSLiteral.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitExpr.svg similarity index 54% rename from apollo-federation/src/sources/connect/json_selection/grammar/JSLiteral.svg rename to apollo-federation/src/sources/connect/json_selection/grammar/LitExpr.svg index 74a592e7e7..200f1d9bb6 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSLiteral.svg +++ 
b/apollo-federation/src/sources/connect/json_selection/grammar/LitExpr.svg @@ -3,54 +3,54 @@ - - - JSPrimitive + xlink:href="#LitPrimitive" + xlink:title="LitPrimitive"> + + + LitPrimitive - - - JSObject + xlink:href="#LitObject" + xlink:title="LitObject"> + + + LitObject - - - JSArray + xlink:href="#LitArray" + xlink:title="LitArray"> + + + LitArray PathSelection + d="m17 17 h2 m20 0 h10 m90 0 h10 m0 0 h16 m-146 0 h20 m126 0 h20 m-166 0 q10 0 10 10 m146 0 q0 -10 10 -10 m-156 10 v24 m146 0 v-24 m-146 24 q0 10 10 10 m126 0 q10 0 10 -10 m-136 10 h10 m76 0 h10 m0 0 h30 m-136 -10 v20 m146 0 v-20 m-146 20 v24 m146 0 v-24 m-146 24 q0 10 10 10 m126 0 q10 0 10 -10 m-136 10 h10 m68 0 h10 m0 0 h38 m-136 -10 v20 m146 0 v-20 m-146 20 v24 m146 0 v-24 m-146 24 q0 10 10 10 m126 0 q10 0 10 -10 m-136 10 h10 m106 0 h10 m23 -132 h-3"/> diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/LitNumber.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitNumber.svg new file mode 100644 index 0000000000..4eb2cc458c --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitNumber.svg @@ -0,0 +1,71 @@ + + + + + + + + + + - + + + [0-9] + + + . + + + [0-9] + + + . 
+ + + [0-9] + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/LitObject.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitObject.svg new file mode 100644 index 0000000000..67f5cc44fb --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitObject.svg @@ -0,0 +1,77 @@ + + + + + + + + + + { + + + + LitProperty + + + + , + + + , + + + } + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/LitPrimitive.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitPrimitive.svg new file mode 100644 index 0000000000..c7ac6846e1 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitPrimitive.svg @@ -0,0 +1,76 @@ + + + + + + + + + + + LitString + + + + + LitNumber + + + + true + + + false + + + null + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/JSProperty.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitProperty.svg similarity index 59% rename from apollo-federation/src/sources/connect/json_selection/grammar/JSProperty.svg rename to apollo-federation/src/sources/connect/json_selection/grammar/LitProperty.svg index 320035e1e4..f46525478b 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/JSProperty.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitProperty.svg @@ -1,32 +1,32 @@ - + @@ -47,14 +47,14 @@ rx="10"/> : - - - JSLiteral + xlink:href="#LitExpr" + xlink:title="LitExpr"> + + + LitExpr - - + d="m17 17 h2 m0 0 h10 m42 0 h10 m0 0 h10 m24 0 h10 m0 0 h10 m64 0 h10 m3 0 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/StringLiteral.svg b/apollo-federation/src/sources/connect/json_selection/grammar/LitString.svg similarity index 86% rename from apollo-federation/src/sources/connect/json_selection/grammar/StringLiteral.svg rename to 
apollo-federation/src/sources/connect/json_selection/grammar/LitString.svg index 229fe4e594..ebdf1b3123 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/StringLiteral.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/LitString.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/MethodArgs.svg b/apollo-federation/src/sources/connect/json_selection/grammar/MethodArgs.svg index c000e67e4a..de10e75c16 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/MethodArgs.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/MethodArgs.svg @@ -1,32 +1,32 @@ - + @@ -40,11 +40,11 @@ rx="10"/> ( - - - JSLiteral + xlink:href="#LitExpr" + xlink:title="LitExpr"> + + + LitExpr , - - + + , + + - ) + ) - - + d="m17 61 h2 m0 0 h10 m26 0 h10 m40 0 h10 m64 0 h10 m-104 0 l20 0 m-1 0 q-9 0 -9 -10 l0 -24 q0 -10 10 -10 m84 44 l20 0 m-20 0 q10 0 10 -10 l0 -24 q0 -10 -10 -10 m-84 0 h10 m24 0 h10 m0 0 h40 m40 44 h10 m0 0 h34 m-64 0 h20 m44 0 h20 m-84 0 q10 0 10 10 m64 0 q0 -10 10 -10 m-74 10 v12 m64 0 v-12 m-64 12 q0 10 10 10 m44 0 q10 0 10 -10 m-54 10 h10 m24 0 h10 m-208 -32 h20 m208 0 h20 m-248 0 q10 0 10 10 m228 0 q0 -10 10 -10 m-238 10 v46 m228 0 v-46 m-228 46 q0 10 10 10 m208 0 q10 0 10 -10 m-218 10 h10 m0 0 h198 m20 -66 h10 m26 0 h10 m3 0 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NakedSubSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NakedSubSelection.svg deleted file mode 100644 index c7ec2b04a5..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/NakedSubSelection.svg +++ /dev/null @@ -1,52 +0,0 @@ - - - - - - - - - - - NamedSelection - - - - - StarSelection - - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NamedFieldSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NamedFieldSelection.svg index 
d2934e6c78..d3f6f77acd 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/NamedFieldSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/NamedFieldSelection.svg @@ -1,32 +1,32 @@ - + @@ -39,21 +39,21 @@ Alias - - - Identifier + xlink:href="#Key" + xlink:title="Key"> + + + Key - - - SubSelection + + + SubSelection - - + d="m17 17 h2 m20 0 h10 m0 0 h60 m-90 0 h20 m70 0 h20 m-110 0 q10 0 10 10 m90 0 q0 -10 10 -10 m-100 10 v12 m90 0 v-12 m-90 12 q0 10 10 10 m70 0 q10 0 10 -10 m-80 10 h10 m50 0 h10 m20 -32 h10 m42 0 h10 m20 0 h10 m0 0 h112 m-142 0 h20 m122 0 h20 m-162 0 q10 0 10 10 m142 0 q0 -10 10 -10 m-152 10 v12 m142 0 v-12 m-142 12 q0 10 10 10 m122 0 q10 0 10 -10 m-132 10 h10 m102 0 h10 m23 -32 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NamedGroupSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NamedGroupSelection.svg index 743cbaf26d..c4305cb536 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/NamedGroupSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/NamedGroupSelection.svg @@ -1,32 +1,32 @@ - + @@ -41,12 +41,12 @@ - - + + SubSelection - - + d="m17 17 h2 m0 0 h10 m50 0 h10 m0 0 h10 m102 0 h10 m3 0 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NamedPathSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NamedPathSelection.svg index c00795281d..402d3eacc5 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/NamedPathSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/NamedPathSelection.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NamedQuotedSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NamedQuotedSelection.svg deleted file mode 100644 index 0a28dac9b3..0000000000 --- 
a/apollo-federation/src/sources/connect/json_selection/grammar/NamedQuotedSelection.svg +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - - Alias - - - - - StringLiteral - - - - - SubSelection - - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/NamedSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/NamedSelection.svg index 5ee79391fb..d257fb4826 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/NamedSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/NamedSelection.svg @@ -1,56 +1,56 @@ - + + xlink:href="#NamedPathSelection" + xlink:title="NamedPathSelection"> - NamedFieldSelection + NamedPathSelection - - - NamedQuotedSelection + xlink:href="#PathWithSubSelection" + xlink:title="PathWithSubSelection"> + + + PathWithSubSelection + xlink:href="#NamedFieldSelection" + xlink:title="NamedFieldSelection"> - NamedPathSelection + NamedFieldSelection NamedGroupSelection - - + d="m17 17 h2 m20 0 h10 m152 0 h10 m0 0 h8 m-200 0 h20 m180 0 h20 m-220 0 q10 0 10 10 m200 0 q0 -10 10 -10 m-210 10 v24 m200 0 v-24 m-200 24 q0 10 10 10 m180 0 q10 0 10 -10 m-190 10 h10 m160 0 h10 m-190 -10 v20 m200 0 v-20 m-200 20 v24 m200 0 v-24 m-200 24 q0 10 10 10 m180 0 q10 0 10 -10 m-190 10 h10 m152 0 h10 m0 0 h8 m-190 -10 v20 m200 0 v-20 m-200 20 v24 m200 0 v-24 m-200 24 q0 10 10 10 m180 0 q10 0 10 -10 m-190 10 h10 m160 0 h10 m23 -132 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Path.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Path.svg new file mode 100644 index 0000000000..3b0de667da --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Path.svg @@ -0,0 +1,66 @@ + + + + + + + + + + + VarPath + + + + + KeyPath + + + + + AtPath + + + + + ExprPath + + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/PathSelection.svg 
b/apollo-federation/src/sources/connect/json_selection/grammar/PathSelection.svg index 575d9840a5..e396bafba0 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/PathSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/PathSelection.svg @@ -1,59 +1,52 @@ - + - - - VarPath - - - - - KeyPath + xlink:href="#Path" + xlink:title="Path"> + + + Path - - - SubSelection + + + SubSelection - - + d="m17 17 h2 m0 0 h10 m48 0 h10 m20 0 h10 m0 0 h112 m-142 0 h20 m122 0 h20 m-162 0 q10 0 10 10 m142 0 q0 -10 10 -10 m-152 10 v12 m142 0 v-12 m-142 12 q0 10 10 10 m122 0 q10 0 10 -10 m-132 10 h10 m102 0 h10 m23 -32 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/PathStep.svg b/apollo-federation/src/sources/connect/json_selection/grammar/PathStep.svg index 299a06d8cc..313830841d 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/PathStep.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/PathStep.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/PathWithSubSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/PathWithSubSelection.svg new file mode 100644 index 0000000000..0274e256a7 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/grammar/PathWithSubSelection.svg @@ -0,0 +1,52 @@ + + + + + + + + + + + Path + + + + + SubSelection + + + + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/Spaces.svg b/apollo-federation/src/sources/connect/json_selection/grammar/Spaces.svg index a08f826870..3dc54e66f5 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/Spaces.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/Spaces.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/SpacesOrComments.svg 
b/apollo-federation/src/sources/connect/json_selection/grammar/SpacesOrComments.svg index 2e9c815c1d..74299b7733 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/SpacesOrComments.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/SpacesOrComments.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/StarSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/StarSelection.svg deleted file mode 100644 index 2b4615d2e9..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/StarSelection.svg +++ /dev/null @@ -1,60 +0,0 @@ - - - - - - - - - - - Alias - - - - * - - - - SubSelection - - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/SubSelection.svg b/apollo-federation/src/sources/connect/json_selection/grammar/SubSelection.svg index 12284c811c..6c912b7439 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/SubSelection.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/SubSelection.svg @@ -1,61 +1,61 @@ - + - - - + + + - { + { - - - NakedSubSelection + xlink:href="#NamedSelection" + xlink:title="NamedSelection"> + + + NamedSelection - - + - } + } - - + d="m17 51 h2 m0 0 h10 m28 0 h10 m20 0 h10 m0 0 h132 m-162 0 l20 0 m-1 0 q-9 0 -9 -10 l0 -14 q0 -10 10 -10 m142 34 l20 0 m-20 0 q10 0 10 -10 l0 -14 q0 -10 -10 -10 m-142 0 h10 m122 0 h10 m20 34 h10 m28 0 h10 m3 0 h-3"/> + + diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/UnsignedInt.svg b/apollo-federation/src/sources/connect/json_selection/grammar/UnsignedInt.svg deleted file mode 100644 index 3d8fdde47e..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/grammar/UnsignedInt.svg +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - [1-9] - - - - NO_SPACE - - - - [0-9] - - - 0 - - - - diff --git a/apollo-federation/src/sources/connect/json_selection/grammar/VarPath.svg 
b/apollo-federation/src/sources/connect/json_selection/grammar/VarPath.svg index 0017afb15f..16daef9d21 100644 --- a/apollo-federation/src/sources/connect/json_selection/grammar/VarPath.svg +++ b/apollo-federation/src/sources/connect/json_selection/grammar/VarPath.svg @@ -3,30 +3,30 @@ diff --git a/apollo-federation/src/sources/connect/json_selection/graphql.rs b/apollo-federation/src/sources/connect/json_selection/graphql.rs deleted file mode 100644 index b67935c05f..0000000000 --- a/apollo-federation/src/sources/connect/json_selection/graphql.rs +++ /dev/null @@ -1,197 +0,0 @@ -// The JSONSelection syntax is intended to be more generic than GraphQL, capable -// of transforming any aribitrary JSON in arbitrary ways, without assuming the -// universal availability of __typename or other convenient GraphQL-isms. -// However, since we are using the JSONSelection syntax to generate -// GraphQL-shaped output JSON, it's helpful to have some GraphQL-specific -// utilities. -// -// This file contains several trait implementations that allow converting from -// the JSONSelection type to a corresponding GraphQL selection set, where (for -// example) PathSelection syntax is expanded to ordinary nested selection sets. -// The resulting JSON will retain the nested structure of the GraphQL selection -// sets, and thus be more verbose than the output of the JSONSelection syntax, -// but may be easier to use for validating the selection against a GraphQL -// schema, using existing code for validating GraphQL operations. 
- -use apollo_compiler::ast; -use apollo_compiler::ast::Selection as GraphQLSelection; -use apollo_compiler::Name; - -use super::parser::JSONSelection; -use super::parser::NamedSelection; -use super::parser::PathSelection; -use super::parser::StarSelection; -use super::parser::SubSelection; - -#[derive(Default)] -struct GraphQLSelections(Vec>); - -impl GraphQLSelections { - fn valid_selections(self) -> Vec { - self.0.into_iter().filter_map(|i| i.ok()).collect() - } -} - -impl From> for GraphQLSelections { - fn from(val: Vec) -> Self { - Self(val.into_iter().map(Ok).collect()) - } -} - -impl From for Vec { - fn from(val: JSONSelection) -> Vec { - match val { - JSONSelection::Named(named_selections) => { - GraphQLSelections::from(named_selections).valid_selections() - } - JSONSelection::Path(path_selection) => path_selection.into(), - } - } -} - -fn new_field(name: String, selection: Option) -> GraphQLSelection { - GraphQLSelection::Field( - apollo_compiler::ast::Field { - alias: None, - name: Name::new_unchecked(&name), - arguments: Default::default(), - directives: Default::default(), - selection_set: selection - .map(GraphQLSelections::valid_selections) - .unwrap_or_default(), - } - .into(), - ) -} - -impl From for Vec { - fn from(val: NamedSelection) -> Vec { - match val { - NamedSelection::Field(alias, name, selection) => vec![new_field( - alias.map(|a| a.name).unwrap_or(name), - selection.map(|s| s.into()), - )], - NamedSelection::Quoted(alias, _name, selection) => { - vec![new_field( - alias.name, - selection.map(GraphQLSelections::from), - )] - } - NamedSelection::Path(alias, path_selection) => { - let graphql_selection: Vec = path_selection.into(); - vec![new_field( - alias.name, - Some(GraphQLSelections::from(graphql_selection)), - )] - } - NamedSelection::Group(alias, sub_selection) => { - vec![new_field(alias.name, Some(sub_selection.into()))] - } - } - } -} - -impl From for Vec { - fn from(val: PathSelection) -> Vec { - match val { - 
PathSelection::Var(_, _) => { - // Variable references do not correspond to GraphQL fields. - vec![] - } - PathSelection::Key(_, tail) => { - let tail = *tail; - tail.into() - } - PathSelection::Selection(selection) => { - GraphQLSelections::from(selection).valid_selections() - } - PathSelection::Empty => vec![], - } - } -} - -impl From for GraphQLSelections { - // give as much as we can, yield errors for star selection without alias. - fn from(val: SubSelection) -> GraphQLSelections { - let mut selections = val - .selections - .into_iter() - .flat_map(|named_selection| { - let selections: Vec = named_selection.into(); - GraphQLSelections::from(selections).0 - }) - .collect::>>(); - - if let Some(StarSelection(alias, sub_selection)) = val.star { - if let Some(alias) = alias { - let star = new_field( - alias.name, - sub_selection.map(|s| GraphQLSelections::from(*s)), - ); - selections.push(Ok(star)); - } else { - selections.push(Err( - "star selection without alias cannot be converted to GraphQL".to_string(), - )); - } - } - GraphQLSelections(selections) - } -} - -#[cfg(test)] -mod tests { - use apollo_compiler::ast::Selection as GraphQLSelection; - - use crate::selection; - - fn print_set(set: &[apollo_compiler::ast::Selection]) -> String { - set.iter() - .map(|s| s.serialize().to_string()) - .collect::>() - .join(" ") - } - - #[test] - fn into_selection_set() { - let selection = selection!("f"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "f"); - - let selection = selection!("f f2 f3"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "f f2 f3"); - - let selection = selection!("f { f2 f3 }"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "f {\n f2\n f3\n}"); - - let selection = selection!("a: f { b: f2 }"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "a {\n b\n}"); - - let selection = selection!(".a { b c }"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "b c"); - - 
let selection = selection!(".a.b { c: .d e }"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "c e"); - - let selection = selection!("a: { b c }"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "a {\n b\n c\n}"); - - let selection = selection!("a: 'quoted'"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "a"); - - let selection = selection!("a b: *"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "a b"); - - let selection = selection!("a *"); - let set: Vec = selection.into(); - assert_eq!(print_set(&set), "a"); - } -} diff --git a/apollo-federation/src/sources/connect/json_selection/helpers.rs b/apollo-federation/src/sources/connect/json_selection/helpers.rs index e811188a82..19689e2546 100644 --- a/apollo-federation/src/sources/connect/json_selection/helpers.rs +++ b/apollo-federation/src/sources/connect/json_selection/helpers.rs @@ -1,7 +1,13 @@ +use apollo_compiler::collections::IndexSet; use nom::character::complete::multispace0; -use nom::IResult; +use nom::Slice; +use serde_json_bytes::Map as JSONMap; use serde_json_bytes::Value as JSON; +use super::location::Span; +use super::location::WithRange; +use super::ParseResult; + // This macro is handy for tests, but it absolutely should never be used with // dynamic input at runtime, since it panics if the selection string fails to // parse for any reason. @@ -9,10 +15,7 @@ use serde_json_bytes::Value as JSON; #[macro_export] macro_rules! selection { ($input:expr) => { - if let Ok((remainder, parsed)) = - $crate::sources::connect::json_selection::JSONSelection::parse($input) - { - assert_eq!(remainder, ""); + if let Ok(parsed) = $crate::sources::connect::json_selection::JSONSelection::parse($input) { parsed } else { panic!("invalid selection: {:?}", $input); @@ -22,25 +25,49 @@ macro_rules! selection { // Consumes any amount of whitespace and/or comments starting with # until the // end of the line. 
-pub fn spaces_or_comments(input: &str) -> IResult<&str, &str> { +pub(crate) fn spaces_or_comments(input: Span) -> ParseResult> { let mut suffix = input; loop { - (suffix, _) = multispace0(suffix)?; - let mut chars = suffix.chars(); - if let Some('#') = chars.next() { - for c in chars.by_ref() { - if c == '\n' { - break; - } + let mut made_progress = false; + let suffix_and_spaces = multispace0(suffix)?; + suffix = suffix_and_spaces.0; + if !suffix_and_spaces.1.fragment().is_empty() { + made_progress = true; + } + let suffix_len = suffix.fragment().len(); + if suffix.fragment().starts_with('#') { + if let Some(newline) = suffix.fragment().find('\n') { + suffix = suffix.slice(newline + 1..); + } else { + suffix = suffix.slice(suffix_len..); } - suffix = chars.as_str(); - } else { - return Ok((suffix, &input[0..input.len() - suffix.len()])); + made_progress = true; + } + if !made_progress { + let end_of_slice = input.fragment().len() - suffix_len; + let start = input.location_offset(); + let end = suffix.location_offset(); + return Ok(( + suffix, + WithRange::new( + input.slice(0..end_of_slice).fragment(), + // The location of the parsed spaces and comments + Some(start..end), + ), + )); } } } -pub fn json_type_name(v: &JSON) -> &str { +#[allow(unused)] +pub(crate) fn span_is_all_spaces_or_comments(input: Span) -> bool { + match spaces_or_comments(input) { + Ok((remainder, _)) => remainder.fragment().is_empty(), + _ => false, + } +} + +pub(crate) fn json_type_name(v: &JSON) -> &str { match v { JSON::Array(_) => "array", JSON::Object(_) => "object", @@ -51,101 +78,167 @@ pub fn json_type_name(v: &JSON) -> &str { } } +pub(crate) fn vec_push(mut vec: Vec, item: T) -> Vec { + vec.push(item); + vec +} + +pub(crate) fn json_merge(a: Option<&JSON>, b: Option<&JSON>) -> (Option, Vec) { + match (a, b) { + (Some(JSON::Object(a)), Some(JSON::Object(b))) => { + let mut merged = JSONMap::new(); + let mut errors = Vec::new(); + + for key in 
IndexSet::from_iter(a.keys().chain(b.keys())) { + let (child_opt, child_errors) = json_merge(a.get(key), b.get(key)); + if let Some(child) = child_opt { + merged.insert(key.clone(), child); + } + errors.extend(child_errors); + } + + (Some(JSON::Object(merged)), errors) + } + + (Some(JSON::Array(a)), Some(JSON::Array(b))) => { + let max_len = a.len().max(b.len()); + let mut merged = Vec::with_capacity(max_len); + let mut errors = Vec::new(); + + for i in 0..max_len { + let (child_opt, child_errors) = json_merge(a.get(i), b.get(i)); + if let Some(child) = child_opt { + merged.push(child); + } + errors.extend(child_errors); + } + + (Some(JSON::Array(merged)), errors) + } + + (Some(JSON::Null), _) => (Some(JSON::Null), Vec::new()), + (_, Some(JSON::Null)) => (Some(JSON::Null), Vec::new()), + + (Some(a), Some(b)) => { + if a == b { + (Some(a.clone()), Vec::new()) + } else { + let json_type_of_a = json_type_name(a); + let json_type_of_b = json_type_name(b); + ( + Some(b.clone()), + if json_type_of_a == json_type_of_b { + vec![] + } else { + vec![format!( + "Lossy merge replacing {} with {}", + json_type_of_a, json_type_of_b + )] + }, + ) + } + } + + (None, Some(b)) => (Some(b.clone()), Vec::new()), + (Some(a), None) => (Some(a.clone()), Vec::new()), + (None, None) => (None, Vec::new()), + } +} + #[cfg(test)] mod tests { use super::*; + use crate::sources::connect::json_selection::location::new_span; #[test] fn test_spaces_or_comments() { - assert_eq!(spaces_or_comments(""), Ok(("", ""))); - assert_eq!(spaces_or_comments(" "), Ok(("", " "))); - assert_eq!(spaces_or_comments(" "), Ok(("", " "))); - - assert_eq!(spaces_or_comments("#"), Ok(("", "#"))); - assert_eq!(spaces_or_comments("# "), Ok(("", "# "))); - assert_eq!(spaces_or_comments(" # "), Ok(("", " # "))); - assert_eq!(spaces_or_comments(" #"), Ok(("", " #"))); - - assert_eq!(spaces_or_comments("#\n"), Ok(("", "#\n"))); - assert_eq!(spaces_or_comments("# \n"), Ok(("", "# \n"))); - assert_eq!(spaces_or_comments(" # 
\n"), Ok(("", " # \n"))); - assert_eq!(spaces_or_comments(" #\n"), Ok(("", " #\n"))); - assert_eq!(spaces_or_comments(" # \n "), Ok(("", " # \n "))); - - assert_eq!(spaces_or_comments("hello"), Ok(("hello", ""))); - assert_eq!(spaces_or_comments(" hello"), Ok(("hello", " "))); - assert_eq!(spaces_or_comments("hello "), Ok(("hello ", ""))); - assert_eq!(spaces_or_comments("hello#"), Ok(("hello#", ""))); - assert_eq!(spaces_or_comments("hello #"), Ok(("hello #", ""))); - assert_eq!(spaces_or_comments("hello # "), Ok(("hello # ", ""))); - assert_eq!(spaces_or_comments(" hello # "), Ok(("hello # ", " "))); - assert_eq!( - spaces_or_comments(" hello # world "), - Ok(("hello # world ", " ")) - ); + fn check(input: &str, (exp_remainder, exp_spaces): (&str, &str)) { + match spaces_or_comments(new_span(input)) { + Ok((remainder, parsed)) => { + assert_eq!(*remainder.fragment(), exp_remainder); + assert_eq!(*parsed.as_ref(), exp_spaces); + } + Err(e) => panic!("error: {:?}", e), + } + } - assert_eq!(spaces_or_comments("#comment"), Ok(("", "#comment"))); - assert_eq!(spaces_or_comments(" #comment"), Ok(("", " #comment"))); - assert_eq!(spaces_or_comments("#comment "), Ok(("", "#comment "))); - assert_eq!(spaces_or_comments("#comment#"), Ok(("", "#comment#"))); - assert_eq!(spaces_or_comments("#comment #"), Ok(("", "#comment #"))); - assert_eq!(spaces_or_comments("#comment # "), Ok(("", "#comment # "))); - assert_eq!( - spaces_or_comments(" #comment # world "), - Ok(("", " #comment # world ")) - ); - assert_eq!( - spaces_or_comments(" # comment # world "), - Ok(("", " # comment # world ")) - ); + check("", ("", "")); + check(" ", ("", " ")); + check(" ", ("", " ")); + + check("#", ("", "#")); + check("# ", ("", "# ")); + check(" # ", ("", " # ")); + check(" #", ("", " #")); + + check("#\n", ("", "#\n")); + check("# \n", ("", "# \n")); + check(" # \n", ("", " # \n")); + check(" #\n", ("", " #\n")); + check(" # \n ", ("", " # \n ")); + + check("hello", ("hello", "")); + check(" 
hello", ("hello", " ")); + check("hello ", ("hello ", "")); + check("hello#", ("hello#", "")); + check("hello #", ("hello #", "")); + check("hello # ", ("hello # ", "")); + check(" hello # ", ("hello # ", " ")); + check(" hello # world ", ("hello # world ", " ")); + + check("#comment", ("", "#comment")); + check(" #comment", ("", " #comment")); + check("#comment ", ("", "#comment ")); + check("#comment#", ("", "#comment#")); + check("#comment #", ("", "#comment #")); + check("#comment # ", ("", "#comment # ")); + check(" #comment # world ", ("", " #comment # world ")); + check(" # comment # world ", ("", " # comment # world ")); - assert_eq!( - spaces_or_comments(" # comment\nnot a comment"), - Ok(("not a comment", " # comment\n")) + check( + " # comment\nnot a comment", + ("not a comment", " # comment\n"), ); - assert_eq!( - spaces_or_comments(" # comment\nnot a comment\n"), - Ok(("not a comment\n", " # comment\n")) + check( + " # comment\nnot a comment\n", + ("not a comment\n", " # comment\n"), ); - assert_eq!( - spaces_or_comments("not a comment\n # comment\nasdf"), - Ok(("not a comment\n # comment\nasdf", "")) + check( + "not a comment\n # comment\nasdf", + ("not a comment\n # comment\nasdf", ""), ); #[rustfmt::skip] - assert_eq!(spaces_or_comments(" + check(" # This is a comment # And so is this not a comment - "), - Ok(("not a comment + ", ("not a comment ", " # This is a comment # And so is this - "))); + ")); #[rustfmt::skip] - assert_eq!(spaces_or_comments(" + check(" # This is a comment not a comment # Another comment - "), - Ok(("not a comment + ", ("not a comment # Another comment ", " # This is a comment - "))); + ")); #[rustfmt::skip] - assert_eq!(spaces_or_comments(" + check(" not a comment # This is a comment # Another comment - "), - Ok(("not a comment + ", ("not a comment # This is a comment # Another comment ", " - "))); + ")); } } diff --git a/apollo-federation/src/sources/connect/json_selection/immutable.rs 
b/apollo-federation/src/sources/connect/json_selection/immutable.rs new file mode 100644 index 0000000000..c1e9acbb40 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/immutable.rs @@ -0,0 +1,56 @@ +use std::clone::Clone; +use std::rc::Rc; + +#[derive(Debug, Clone)] +pub(crate) struct InputPath { + path: Path, +} + +type Path = Option>>; + +#[derive(Debug, Clone)] +struct AppendPath { + prefix: Path, + last: T, +} + +impl InputPath { + pub(crate) fn empty() -> InputPath { + InputPath { path: None } + } + + pub(crate) fn append(&self, last: T) -> Self { + Self { + path: Some(Rc::new(AppendPath { + prefix: self.path.clone(), + last, + })), + } + } + + pub(crate) fn to_vec(&self) -> Vec { + // This method needs to be iterative rather than recursive, to be + // consistent with the paranoia of the drop method. + let mut vec = Vec::new(); + let mut path = self.path.as_deref(); + while let Some(p) = path { + vec.push(p.last.clone()); + path = p.prefix.as_deref(); + } + vec.reverse(); + vec + } +} + +impl Drop for InputPath { + fn drop(&mut self) { + let mut path = self.path.take(); + while let Some(rc) = path { + if let Ok(mut p) = Rc::try_unwrap(rc) { + path = p.prefix.take(); + } else { + break; + } + } + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/known_var.rs b/apollo-federation/src/sources/connect/json_selection/known_var.rs new file mode 100644 index 0000000000..17de5a9e64 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/known_var.rs @@ -0,0 +1,53 @@ +use std::str::FromStr; + +#[cfg(test)] +use super::location::WithRange; +use crate::sources::connect::variable::Namespace; + +#[derive(PartialEq, Eq, Clone, Hash)] +pub(crate) enum KnownVariable { + External(Namespace), + Dollar, + AtSign, +} + +impl KnownVariable { + pub(crate) fn from_str(var_name: &str) -> Option { + match var_name { + "$" => Some(Self::Dollar), + "@" => Some(Self::AtSign), + s => 
Namespace::from_str(s).ok().map(Self::External), + } + } + + pub(crate) fn as_str(&self) -> &'static str { + match self { + Self::External(namespace) => namespace.as_str(), + Self::Dollar => "$", + Self::AtSign => "@", + } + } + + #[cfg(test)] + pub(super) fn into_with_range(self) -> WithRange { + WithRange::new(self, None) + } +} + +impl std::fmt::Debug for KnownVariable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +impl std::fmt::Display for KnownVariable { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +impl From for KnownVariable { + fn from(namespace: Namespace) -> Self { + Self::External(namespace) + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/lit_expr.rs b/apollo-federation/src/sources/connect/json_selection/lit_expr.rs new file mode 100644 index 0000000000..630fac6f85 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/lit_expr.rs @@ -0,0 +1,583 @@ +//! A LitExpr (short for LiteralExpression) is similar to a JSON value (or +//! serde_json::Value), with the addition of PathSelection as a possible leaf +//! value, so literal expressions passed to -> methods (via MethodArgs) can +//! incorporate dynamic $variable values in addition to the usual input data and +//! argument values. 
+ +use apollo_compiler::collections::IndexMap; +use nom::branch::alt; +use nom::character::complete::char; +use nom::character::complete::one_of; +use nom::combinator::map; +use nom::combinator::opt; +use nom::combinator::recognize; +use nom::multi::many0; +use nom::multi::many1; +use nom::sequence::pair; +use nom::sequence::preceded; +use nom::sequence::tuple; + +use super::helpers::spaces_or_comments; +use super::location::merge_ranges; +use super::location::ranged_span; +use super::location::Ranged; +use super::location::Span; +use super::location::WithRange; +use super::nom_error_message; +use super::parser::parse_string_literal; +use super::parser::Key; +use super::parser::PathSelection; +use super::ExternalVarPaths; +use super::ParseResult; + +#[derive(Debug, PartialEq, Eq, Clone)] +pub(crate) enum LitExpr { + String(String), + Number(serde_json::Number), + Bool(bool), + Null, + Object(IndexMap, WithRange>), + Array(Vec>), + Path(PathSelection), +} + +impl LitExpr { + // LitExpr ::= LitPrimitive | LitObject | LitArray | PathSelection + // LitPrimitive ::= LitString | LitNumber | "true" | "false" | "null" + pub(crate) fn parse(input: Span) -> ParseResult> { + tuple(( + spaces_or_comments, + alt(( + map(parse_string_literal, |s| s.take_as(Self::String)), + Self::parse_number, + map(ranged_span("true"), |t| { + WithRange::new(Self::Bool(true), t.range()) + }), + map(ranged_span("false"), |f| { + WithRange::new(Self::Bool(false), f.range()) + }), + map(ranged_span("null"), |n| { + WithRange::new(Self::Null, n.range()) + }), + Self::parse_object, + Self::parse_array, + map(PathSelection::parse, |p| { + let range = p.range(); + WithRange::new(Self::Path(p), range) + }), + )), + ))(input) + .map(|(input, (_, value))| (input, value)) + } + + // LitNumber ::= "-"? ([0-9]+ ("." [0-9]*)? | "." 
[0-9]+) + fn parse_number(input: Span) -> ParseResult> { + let (suffix, (_, neg, _, num)) = tuple(( + spaces_or_comments, + opt(ranged_span("-")), + spaces_or_comments, + alt(( + map( + pair( + recognize(many1(one_of("0123456789"))), + opt(tuple(( + spaces_or_comments, + ranged_span("."), + spaces_or_comments, + recognize(many0(one_of("0123456789"))), + ))), + ), + |(int, frac)| { + let int_range = Some( + int.location_offset()..int.location_offset() + int.fragment().len(), + ); + + let mut s = String::new(); + s.push_str(int.fragment()); + + let full_range = if let Some((_, dot, _, frac)) = frac { + let frac_range = merge_ranges( + dot.range(), + if frac.len() > 0 { + Some( + frac.location_offset() + ..frac.location_offset() + frac.fragment().len(), + ) + } else { + None + }, + ); + s.push('.'); + if frac.fragment().is_empty() { + s.push('0'); + } else { + s.push_str(frac.fragment()); + } + merge_ranges(int_range, frac_range) + } else { + int_range + }; + + WithRange::new(s, full_range) + }, + ), + map( + tuple(( + spaces_or_comments, + ranged_span("."), + spaces_or_comments, + recognize(many1(one_of("0123456789"))), + )), + |(_, dot, _, frac)| { + let frac_range = Some( + frac.location_offset()..frac.location_offset() + frac.fragment().len(), + ); + let full_range = merge_ranges(dot.range(), frac_range); + WithRange::new(format!("0.{}", frac.fragment()), full_range) + }, + ), + )), + ))(input)?; + + let mut number = String::new(); + if neg.is_some() { + number.push('-'); + } + number.push_str(num.as_str()); + + if let Ok(lit_number) = number.parse().map(Self::Number) { + let range = merge_ranges(neg.and_then(|n| n.range()), num.range()); + Ok((suffix, WithRange::new(lit_number, range))) + } else { + Err(nom_error_message( + input, + // We could include the faulty number in the error message, but + // it will also appear at the beginning of the input span. 
+ "Failed to parse numeric literal", + )) + } + } + + // LitObject ::= "{" (LitProperty ("," LitProperty)* ","?)? "}" + fn parse_object(input: Span) -> ParseResult> { + tuple(( + spaces_or_comments, + ranged_span("{"), + spaces_or_comments, + map( + opt(tuple(( + Self::parse_property, + many0(preceded( + tuple((spaces_or_comments, char(','))), + Self::parse_property, + )), + opt(tuple((spaces_or_comments, char(',')))), + ))), + |properties| { + let mut output = IndexMap::default(); + if let Some(((first_key, first_value), rest, _trailing_comma)) = properties { + output.insert(first_key, first_value); + for (key, value) in rest { + output.insert(key, value); + } + } + Self::Object(output) + }, + ), + spaces_or_comments, + ranged_span("}"), + ))(input) + .map(|(input, (_, open_brace, _, output, _, close_brace))| { + let range = merge_ranges(open_brace.range(), close_brace.range()); + (input, WithRange::new(output, range)) + }) + } + + // LitProperty ::= Key ":" LitExpr + fn parse_property(input: Span) -> ParseResult<(WithRange, WithRange)> { + tuple((Key::parse, spaces_or_comments, char(':'), Self::parse))(input) + .map(|(input, (key, _, _colon, value))| (input, (key, value))) + } + + // LitArray ::= "[" (LitExpr ("," LitExpr)* ","?)? 
"]" + fn parse_array(input: Span) -> ParseResult> { + tuple(( + spaces_or_comments, + ranged_span("["), + spaces_or_comments, + map( + opt(tuple(( + Self::parse, + many0(preceded( + tuple((spaces_or_comments, char(','))), + Self::parse, + )), + opt(tuple((spaces_or_comments, char(',')))), + ))), + |elements| { + let mut output = vec![]; + if let Some((first, rest, _trailing_comma)) = elements { + output.push(first); + output.extend(rest); + } + Self::Array(output) + }, + ), + spaces_or_comments, + ranged_span("]"), + ))(input) + .map(|(input, (_, open_bracket, _, output, _, close_bracket))| { + let range = merge_ranges(open_bracket.range(), close_bracket.range()); + (input, WithRange::new(output, range)) + }) + } + + #[cfg(test)] + pub(super) fn into_with_range(self) -> WithRange { + WithRange::new(self, None) + } + + #[allow(unused)] + pub(super) fn as_i64(&self) -> Option { + match self { + Self::Number(n) => n.as_i64(), + _ => None, + } + } +} + +impl ExternalVarPaths for LitExpr { + fn external_var_paths(&self) -> Vec<&PathSelection> { + let mut paths = vec![]; + match self { + Self::String(_) | Self::Number(_) | Self::Bool(_) | Self::Null => {} + Self::Object(map) => { + for value in map.values() { + paths.extend(value.external_var_paths()); + } + } + Self::Array(vec) => { + for value in vec { + paths.extend(value.external_var_paths()); + } + } + Self::Path(path) => { + paths.extend(path.external_var_paths()); + } + } + paths + } +} + +#[cfg(test)] +mod tests { + use super::super::known_var::KnownVariable; + use super::super::location::strip_ranges::StripRanges; + use super::*; + use crate::sources::connect::json_selection::helpers::span_is_all_spaces_or_comments; + use crate::sources::connect::json_selection::location::new_span; + use crate::sources::connect::json_selection::PathList; + use crate::sources::connect::variable::Namespace; + + fn check_parse(input: &str, expected: LitExpr) { + match LitExpr::parse(new_span(input)) { + Ok((remainder, parsed)) => { 
+ assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(parsed.strip_ranges(), WithRange::new(expected, None)); + } + Err(e) => panic!("Failed to parse '{}': {:?}", input, e), + }; + } + + #[test] + fn test_lit_expr_parse_primitives() { + check_parse("'hello'", LitExpr::String("hello".to_string())); + check_parse("\"hello\"", LitExpr::String("hello".to_string())); + check_parse(" 'hello' ", LitExpr::String("hello".to_string())); + check_parse(" \"hello\" ", LitExpr::String("hello".to_string())); + + check_parse("123", LitExpr::Number(serde_json::Number::from(123))); + check_parse("-123", LitExpr::Number(serde_json::Number::from(-123))); + check_parse(" - 123 ", LitExpr::Number(serde_json::Number::from(-123))); + check_parse( + "123.456", + LitExpr::Number(serde_json::Number::from_f64(123.456).unwrap()), + ); + check_parse( + ".456", + LitExpr::Number(serde_json::Number::from_f64(0.456).unwrap()), + ); + check_parse( + "-.456", + LitExpr::Number(serde_json::Number::from_f64(-0.456).unwrap()), + ); + check_parse( + "123.", + LitExpr::Number(serde_json::Number::from_f64(123.0).unwrap()), + ); + check_parse( + "-123.", + LitExpr::Number(serde_json::Number::from_f64(-123.0).unwrap()), + ); + + check_parse("true", LitExpr::Bool(true)); + check_parse(" true ", LitExpr::Bool(true)); + check_parse("false", LitExpr::Bool(false)); + check_parse(" false ", LitExpr::Bool(false)); + check_parse("null", LitExpr::Null); + check_parse(" null ", LitExpr::Null); + } + + #[test] + fn test_lit_expr_parse_objects() { + check_parse( + "{a: 1}", + LitExpr::Object({ + let mut map = IndexMap::default(); + map.insert( + Key::field("a").into_with_range(), + LitExpr::Number(serde_json::Number::from(1)).into_with_range(), + ); + map + }), + ); + + check_parse( + "{'a': 1}", + LitExpr::Object({ + let mut map = IndexMap::default(); + map.insert( + Key::quoted("a").into_with_range(), + LitExpr::Number(serde_json::Number::from(1)).into_with_range(), + ); + map + }), + ); + + { + fn 
make_expected(a_key: Key, b_key: Key) -> LitExpr { + let mut map = IndexMap::default(); + map.insert( + a_key.into_with_range(), + LitExpr::Number(serde_json::Number::from(1)).into_with_range(), + ); + map.insert( + b_key.into_with_range(), + LitExpr::Number(serde_json::Number::from(2)).into_with_range(), + ); + LitExpr::Object(map) + } + check_parse( + "{'a': 1, 'b': 2}", + make_expected(Key::quoted("a"), Key::quoted("b")), + ); + check_parse( + "{ a : 1, 'b': 2}", + make_expected(Key::field("a"), Key::quoted("b")), + ); + check_parse( + "{ a : 1, b: 2}", + make_expected(Key::field("a"), Key::field("b")), + ); + check_parse( + "{ \"a\" : 1, \"b\": 2 }", + make_expected(Key::quoted("a"), Key::quoted("b")), + ); + check_parse( + "{ \"a\" : 1, b: 2 }", + make_expected(Key::quoted("a"), Key::field("b")), + ); + check_parse( + "{ a : 1, \"b\": 2 }", + make_expected(Key::field("a"), Key::quoted("b")), + ); + } + } + + #[test] + fn test_lit_expr_parse_arrays() { + check_parse( + "[1, 2]", + LitExpr::Array(vec![ + WithRange::new(LitExpr::Number(serde_json::Number::from(1)), None), + WithRange::new(LitExpr::Number(serde_json::Number::from(2)), None), + ]), + ); + + check_parse( + "[1, true, 'three']", + LitExpr::Array(vec![ + WithRange::new(LitExpr::Number(serde_json::Number::from(1)), None), + WithRange::new(LitExpr::Bool(true), None), + WithRange::new(LitExpr::String("three".to_string()), None), + ]), + ); + } + + #[test] + fn test_lit_expr_parse_paths() { + { + let expected = LitExpr::Path(PathSelection { + path: PathList::Key( + Key::field("a").into_with_range(), + PathList::Key( + Key::field("b").into_with_range(), + PathList::Key( + Key::field("c").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }); + + check_parse("a.b.c", expected.clone()); + check_parse(" a . b . 
c ", expected.clone()); + } + + { + let expected = LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("data").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }); + check_parse("$.data", expected.clone()); + check_parse(" $ . data ", expected.clone()); + } + + { + let expected = LitExpr::Array(vec![ + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("a").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + LitExpr::Path(PathSelection { + path: PathList::Key( + Key::field("b").into_with_range(), + PathList::Key( + Key::field("c").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + LitExpr::Path(PathSelection { + path: PathList::Key( + Key::field("d").into_with_range(), + PathList::Key( + Key::field("e").into_with_range(), + PathList::Key( + Key::field("f").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + ]); + + check_parse("[$.a, b.c, d.e.f]", expected.clone()); + check_parse("[$.a, b.c, d.e.f,]", expected.clone()); + check_parse("[ $ . a , b . c , d . e . f ]", expected.clone()); + check_parse("[ $ . a , b . c , d . e . f , ]", expected.clone()); + check_parse( + r#"[ + $.a, + b.c, + d.e.f, + ]"#, + expected.clone(), + ); + check_parse( + r#"[ + $ . a , + b . c , + d . e . 
f , + ]"#, + expected.clone(), + ); + } + + { + let expected = LitExpr::Object({ + let mut map = IndexMap::default(); + map.insert( + Key::field("a").into_with_range(), + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::Args).into_with_range(), + PathList::Key( + Key::field("a").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + ); + map.insert( + Key::field("b").into_with_range(), + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::This).into_with_range(), + PathList::Key( + Key::field("b").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + ); + map + }); + + check_parse( + r#"{ + a: $args.a, + b: $this.b, + }"#, + expected.clone(), + ); + + check_parse( + r#"{ + b: $this.b, + a: $args.a, + }"#, + expected.clone(), + ); + + check_parse( + r#" { + a : $args . a , + b : $this . b + ,} "#, + expected.clone(), + ); + } + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/location.rs b/apollo-federation/src/sources/connect/json_selection/location.rs new file mode 100644 index 0000000000..8ac0c17b99 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/location.rs @@ -0,0 +1,355 @@ +use nom::bytes::complete::tag; +use nom::combinator::map; +use nom_locate::LocatedSpan; + +use super::ParseResult; + +// Currently, all our error messages are &'static str, which allows the Span +// type to remain Copy, which is convenient to avoid having to clone Spans +// frequently in the parser code. +// +// If we wanted to introduce any error messages computed using format!, we'd +// have to switch to Option here (or some other type containing owned +// String data), which would make Span no longer Copy, requiring more cloning. +// Not the end of the world, but something to keep in mind for the future. 
+// +// The cloning would still be relatively cheap because we use None throughout +// parsing and then only set Some(message) when we need to report an error, so +// we would not be cloning long String messages very often (and the rest of the +// Span fields are cheap to clone). +pub(crate) type Span<'a> = LocatedSpan<&'a str, Option<&'static str>>; + +pub(super) fn new_span(input: &str) -> Span { + Span::new_extra(input, None) +} + +// Some parsed AST structures, like PathSelection and NamedSelection, can +// produce a range directly from their children, so they do not need to be +// wrapped as WithRange or WithRange. +// Additionally, AST nodes that are structs can store their own range as a +// field, so they can implement Ranged without the WithRange wrapper. +pub(crate) trait Ranged { + fn range(&self) -> OffsetRange; +} + +// The ranges produced by the JSONSelection parser are pairs of character +// offsets into the original string. The first element of the pair is the offset +// of the first character, and the second element is the offset of the character +// just past the end of the range. Offsets start at 0 for the first character in +// the file, following nom_locate's span.location_offset() convention. +pub(crate) type OffsetRange = Option>; + +// The most common implementation of the Ranged trait is the WithRange +// struct, used to wrap any AST node that (a) needs its own location information +// (because that information is not derivable from its children) and (b) cannot +// easily store that information by adding another struct field (most often +// because T is an enum or primitive/String type, not a struct). +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct WithRange { + node: Box, + range: OffsetRange, +} + +// We can recover some of the ergonomics of working with the inner type T by +// implementing Deref and DerefMut for WithRange. 
+impl std::ops::Deref for WithRange { + type Target = T; + + fn deref(&self) -> &Self::Target { + self.node.as_ref() + } +} +impl std::ops::DerefMut for WithRange { + fn deref_mut(&mut self) -> &mut Self::Target { + self.node.as_mut() + } +} + +impl AsRef for WithRange { + fn as_ref(&self) -> &T { + self.node.as_ref() + } +} + +impl AsMut for WithRange { + fn as_mut(&mut self) -> &mut T { + self.node.as_mut() + } +} + +impl PartialEq for WithRange +where + T: PartialEq, +{ + fn eq(&self, other: &T) -> bool { + self.node.as_ref() == other + } +} + +// Implement Hash if the inner type T implements Hash. +impl std::hash::Hash for WithRange { + fn hash(&self, state: &mut H) { + self.node.as_ref().hash(state) + } +} + +impl Ranged for WithRange { + fn range(&self) -> OffsetRange { + self.range.clone() + } +} + +impl WithRange { + pub(crate) fn new(node: T, range: OffsetRange) -> Self { + Self { + node: Box::new(node), + range, + } + } + + #[allow(unused)] + pub(crate) fn take(self) -> T { + *self.node + } + + pub(crate) fn take_as(self, f: impl FnOnce(T) -> U) -> WithRange { + WithRange::new(f(*self.node), self.range) + } +} + +pub(super) fn merge_ranges(left: OffsetRange, right: OffsetRange) -> OffsetRange { + match (left, right) { + // Tolerate out-of-order and overlapping ranges. + (Some(left_range), Some(right_range)) => { + Some(left_range.start.min(right_range.start)..left_range.end.max(right_range.end)) + } + (Some(left_range), None) => Some(left_range), + (None, Some(right_range)) => Some(right_range), + (None, None) => None, + } +} + +// Parser combinator that matches a &str and returns a WithRange<&str> with the +// matched string and the range of the match. 
+pub(super) fn ranged_span<'a, 'b>( + s: &'a str, +) -> impl FnMut(Span<'b>) -> ParseResult<'b, WithRange<&'b str>> + 'a +where + 'b: 'a, +{ + map(tag(s), |t: Span<'b>| { + let start = t.location_offset(); + let range = Some(start..start + s.len()); + WithRange::new(*t.fragment(), range) + }) +} + +#[cfg(test)] +pub(crate) mod strip_ranges { + use apollo_compiler::collections::IndexMap; + + use super::super::known_var::KnownVariable; + use super::super::lit_expr::LitExpr; + use super::super::parser::*; + use super::WithRange; + + /// Including location information in the AST introduces unnecessary + /// varation in many tests. StripLoc is a test-only trait allowing + /// participating AST nodes to remove their own and their descendants' + /// location information, thereby normalizing the AST for assert_eq! + /// comparisons. + pub(crate) trait StripRanges { + fn strip_ranges(&self) -> Self; + } + + impl StripRanges for WithRange { + fn strip_ranges(&self) -> Self { + WithRange::new(self.as_ref().clone(), None) + } + } + + impl StripRanges for JSONSelection { + fn strip_ranges(&self) -> Self { + match self { + JSONSelection::Named(subselect) => JSONSelection::Named(subselect.strip_ranges()), + JSONSelection::Path(path) => JSONSelection::Path(path.strip_ranges()), + } + } + } + + impl StripRanges for NamedSelection { + fn strip_ranges(&self) -> Self { + match self { + Self::Field(alias, key, sub) => Self::Field( + alias.as_ref().map(|a| a.strip_ranges()), + key.strip_ranges(), + sub.as_ref().map(|s| s.strip_ranges()), + ), + Self::Path { + alias, + path, + inline, + } => { + let stripped_alias = alias.as_ref().map(|a| a.strip_ranges()); + Self::Path { + alias: stripped_alias, + path: path.strip_ranges(), + inline: *inline, + } + } + Self::Group(alias, sub) => Self::Group(alias.strip_ranges(), sub.strip_ranges()), + } + } + } + + impl StripRanges for PathSelection { + fn strip_ranges(&self) -> Self { + Self { + path: self.path.strip_ranges(), + } + } + } + + impl 
StripRanges for WithRange { + fn strip_ranges(&self) -> Self { + WithRange::new( + match self.as_ref() { + PathList::Var(var, rest) => { + PathList::Var(var.strip_ranges(), rest.strip_ranges()) + } + PathList::Key(key, rest) => { + PathList::Key(key.strip_ranges(), rest.strip_ranges()) + } + PathList::Expr(expr, rest) => { + PathList::Expr(expr.strip_ranges(), rest.strip_ranges()) + } + PathList::Method(method, opt_args, rest) => PathList::Method( + method.strip_ranges(), + opt_args.as_ref().map(|args| args.strip_ranges()), + rest.strip_ranges(), + ), + PathList::Selection(sub) => PathList::Selection(sub.strip_ranges()), + PathList::Empty => PathList::Empty, + }, + None, + ) + } + } + + impl StripRanges for SubSelection { + fn strip_ranges(&self) -> Self { + SubSelection { + selections: self.selections.iter().map(|s| s.strip_ranges()).collect(), + ..Default::default() + } + } + } + + impl StripRanges for Alias { + fn strip_ranges(&self) -> Self { + Alias { + name: self.name.strip_ranges(), + range: None, + } + } + } + + impl StripRanges for WithRange { + fn strip_ranges(&self) -> Self { + WithRange::new(self.as_ref().clone(), None) + } + } + + impl StripRanges for MethodArgs { + fn strip_ranges(&self) -> Self { + MethodArgs { + args: self.args.iter().map(|arg| arg.strip_ranges()).collect(), + range: None, + } + } + } + + impl StripRanges for WithRange { + fn strip_ranges(&self) -> Self { + WithRange::new( + match self.as_ref() { + LitExpr::String(s) => LitExpr::String(s.clone()), + LitExpr::Number(n) => LitExpr::Number(n.clone()), + LitExpr::Bool(b) => LitExpr::Bool(*b), + LitExpr::Null => LitExpr::Null, + LitExpr::Object(map) => { + let mut new_map = IndexMap::default(); + for (key, value) in map { + new_map.insert(key.strip_ranges(), value.strip_ranges()); + } + LitExpr::Object(new_map) + } + LitExpr::Array(vec) => { + let mut new_vec = vec![]; + for value in vec { + new_vec.push(value.strip_ranges()); + } + LitExpr::Array(new_vec) + } + LitExpr::Path(path) => 
LitExpr::Path(path.strip_ranges()), + }, + None, + ) + } + } + + impl StripRanges for WithRange { + fn strip_ranges(&self) -> Self { + WithRange::new(self.as_ref().clone(), None) + } + } +} + +#[cfg(test)] +mod tests { + use insta::assert_debug_snapshot; + use insta::assert_snapshot; + + use super::*; + use crate::sources::connect::JSONSelection; + + #[test] + fn test_merge_ranges() { + // Simple cases: + assert_eq!(merge_ranges(None, None), None); + assert_eq!(merge_ranges(Some(0..1), None), Some(0..1)); + assert_eq!(merge_ranges(None, Some(0..1)), Some(0..1)); + assert_eq!(merge_ranges(Some(0..1), Some(1..2)), Some(0..2)); + + // Out-of-order and overlapping ranges: + assert_eq!(merge_ranges(Some(1..2), Some(0..1)), Some(0..2)); + assert_eq!(merge_ranges(Some(0..1), Some(1..2)), Some(0..2)); + assert_eq!(merge_ranges(Some(0..2), Some(1..3)), Some(0..3)); + assert_eq!(merge_ranges(Some(1..3), Some(0..2)), Some(0..3)); + } + + #[test] + fn test_arrow_path_ranges() { + let parsed = JSONSelection::parse(" __typename: @ -> echo ( \"Frog\" , ) ").unwrap(); + assert_debug_snapshot!(parsed); + } + + #[test] + fn test_parse_with_range_snapshots() { + let parsed = JSONSelection::parse( + r#" + path: some.nested.path { isbn author { name }} + alias: "not an identifier" { + # Inject "Frog" as the __typename + __typename: @->echo( "Frog" , ) + wrapped: $->echo({ wrapped : @ , }) + group: { a b c } + arg: $args . 
arg + field + } + "#, + ) + .unwrap(); + assert_snapshot!(format!("{:#?}", parsed)); + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/methods.rs b/apollo-federation/src/sources/connect/json_selection/methods.rs new file mode 100644 index 0000000000..76c13f30c9 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/methods.rs @@ -0,0 +1,227 @@ +use apollo_compiler::collections::IndexMap; +use serde_json_bytes::Value as JSON; +use shape::Shape; + +use super::immutable::InputPath; +use super::location::WithRange; +use super::ApplyToError; +use super::MethodArgs; +use super::PathList; +use super::VarsWithPathsMap; + +// Two kinds of methods: public ones and not-yet-public ones. The future ones +// have proposed implementations and tests, and some are even used within the +// tests of other methods, but are not yet exposed for use in connector schemas. +// Graduating to public status requires updated documentation, careful review, +// and team discussion to make sure the method is one we want to support +// long-term. Once we have a better story for checking method type signatures +// and versioning any behavioral changes, we should be able to expand/improve +// the list of public::* methods more quickly/confidently. +mod future; +mod public; + +#[cfg(test)] +mod tests; + +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub(super) enum ArrowMethod { + // Public methods: + Echo, + Map, + Match, + First, + Last, + Slice, + Size, + Entries, + JsonStringify, + + // Future methods: + TypeOf, + Eq, + MatchIf, + Add, + Sub, + Mul, + Div, + Mod, + Has, + Get, + Keys, + Values, + Not, + Or, + And, +} + +#[macro_export] +macro_rules! 
impl_arrow_method { + ($struct_name:ident, $impl_fn_name:ident, $shape_fn_name:ident) => { + #[derive(Debug, Clone, Copy, PartialEq, Eq)] + pub(super) struct $struct_name; + impl $crate::sources::connect::json_selection::methods::ArrowMethodImpl for $struct_name { + fn apply( + &self, + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, + ) -> (Option, Vec) { + $impl_fn_name(method_name, method_args, data, vars, input_path, tail) + } + + fn shape( + &self, + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape { + $shape_fn_name( + method_name, + method_args, + input_shape, + dollar_shape, + named_var_shapes, + ) + } + } + }; +} + +pub(super) trait ArrowMethodImpl { + fn apply( + &self, + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, + ) -> (Option, Vec); + + fn shape( + &self, + // Shape processing errors for methods can benefit from knowing the name + // of the method and its source range. Note that ArrowMethodImpl::shape + // is invoked for every invocation of a method, with appropriately + // different source ranges. + method_name: &WithRange, + // Most methods implementing ArrowMethodImpl::shape will need to know + // the shapes of their arguments, which can be computed from MethodArgs + // using the compute_output_shape method. + method_args: Option<&MethodArgs>, + // The input_shape is the shape of the @ variable, or the value from the + // left hand side of the -> token. + input_shape: Shape, + // The dollar_shape is the shape of the $ variable, or the input object + // associated with the closest enclosing subselection. 
+ dollar_shape: Shape, + // Other variable shapes may also be provided here, though in general + // variables and their subproperties can be represented abstractly using + // $var.nested.property ShapeCase::Name shapes. + named_var_shapes: &IndexMap<&str, Shape>, + ) -> Shape; +} + +// This Deref implementation allows us to call .apply(...) directly on the +// ArrowMethod enum. +impl std::ops::Deref for ArrowMethod { + type Target = dyn ArrowMethodImpl; + + fn deref(&self) -> &Self::Target { + match self { + // Public methods: + Self::Echo => &public::EchoMethod, + Self::Map => &public::MapMethod, + Self::Match => &public::MatchMethod, + Self::First => &public::FirstMethod, + Self::Last => &public::LastMethod, + Self::Slice => &public::SliceMethod, + Self::Size => &public::SizeMethod, + Self::Entries => &public::EntriesMethod, + Self::JsonStringify => &public::JsonStringifyMethod, + + // Future methods: + Self::TypeOf => &future::TypeOfMethod, + Self::Eq => &future::EqMethod, + Self::MatchIf => &future::MatchIfMethod, + Self::Add => &future::AddMethod, + Self::Sub => &future::SubMethod, + Self::Mul => &future::MulMethod, + Self::Div => &future::DivMethod, + Self::Mod => &future::ModMethod, + Self::Has => &future::HasMethod, + Self::Get => &future::GetMethod, + Self::Keys => &future::KeysMethod, + Self::Values => &future::ValuesMethod, + Self::Not => &future::NotMethod, + Self::Or => &future::OrMethod, + Self::And => &future::AndMethod, + } + } +} + +impl ArrowMethod { + // This method is currently used at runtime to look up methods by &str name, + // but it could be hoisted parsing time, and then we'd store an ArrowMethod + // instead of a String for the method name in the AST. 
+ pub(super) fn lookup(name: &str) -> Option { + let method_opt = match name { + "echo" => Some(Self::Echo), + "map" => Some(Self::Map), + "eq" => Some(Self::Eq), + "match" => Some(Self::Match), + // As this case suggests, we can't necessarily provide a name() + // method for ArrowMethod (the opposite of lookup), because method + // implementations can be used under multiple names. + "matchIf" | "match_if" => Some(Self::MatchIf), + "typeof" => Some(Self::TypeOf), + "add" => Some(Self::Add), + "sub" => Some(Self::Sub), + "mul" => Some(Self::Mul), + "div" => Some(Self::Div), + "mod" => Some(Self::Mod), + "first" => Some(Self::First), + "last" => Some(Self::Last), + "slice" => Some(Self::Slice), + "size" => Some(Self::Size), + "has" => Some(Self::Has), + "get" => Some(Self::Get), + "keys" => Some(Self::Keys), + "values" => Some(Self::Values), + "entries" => Some(Self::Entries), + "not" => Some(Self::Not), + "or" => Some(Self::Or), + "and" => Some(Self::And), + "jsonStringify" => Some(Self::JsonStringify), + _ => None, + }; + + match method_opt { + Some(method) if cfg!(test) || method.is_public() => Some(method), + _ => None, + } + } + + pub(super) fn is_public(&self) -> bool { + // This set controls which ->methods are exposed for use in connector + // schemas. Non-public methods are still implemented and tested, but + // will not be returned from lookup_arrow_method outside of tests. 
+ matches!( + self, + Self::Echo + | Self::Map + | Self::Match + | Self::First + | Self::Last + | Self::Slice + | Self::Size + | Self::Entries + | Self::JsonStringify + ) + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/methods/future.rs b/apollo-federation/src/sources/connect/json_selection/methods/future.rs new file mode 100644 index 0000000000..79f8b7e3d1 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/methods/future.rs @@ -0,0 +1,1081 @@ +// The future.rs module contains methods that are not yet exposed for use in +// JSONSelection strings in connector schemas, but have proposed implementations +// and tests. After careful review, they may one day move to public.rs. + +use apollo_compiler::collections::IndexMap; +use serde_json::Number; +use serde_json_bytes::Value as JSON; +use shape::Shape; +use shape::ShapeCase; + +use crate::impl_arrow_method; +use crate::sources::connect::json_selection::apply_to::ApplyToResultMethods; +use crate::sources::connect::json_selection::helpers::json_type_name; +use crate::sources::connect::json_selection::helpers::vec_push; +use crate::sources::connect::json_selection::immutable::InputPath; +use crate::sources::connect::json_selection::lit_expr::LitExpr; +use crate::sources::connect::json_selection::location::merge_ranges; +use crate::sources::connect::json_selection::location::Ranged; +use crate::sources::connect::json_selection::location::WithRange; +use crate::sources::connect::json_selection::ApplyToError; +use crate::sources::connect::json_selection::ApplyToInternal; +use crate::sources::connect::json_selection::MethodArgs; +use crate::sources::connect::json_selection::PathList; +use crate::sources::connect::json_selection::VarsWithPathsMap; + +impl_arrow_method!(TypeOfMethod, typeof_method, typeof_shape); +fn typeof_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, 
+) -> (Option, Vec) { + if method_args.is_some() { + ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ) + } else { + let typeof_string = JSON::String(json_type_name(data).to_string().into()); + tail.apply_to_path(&typeof_string, vars, input_path) + } +} +fn typeof_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + _input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + // TODO Compute this union type once and clone it here. + Shape::one([ + Shape::string_value("null"), + Shape::string_value("boolean"), + Shape::string_value("number"), + Shape::string_value("string"), + Shape::string_value("array"), + Shape::string_value("object"), + ]) +} + +impl_arrow_method!(EqMethod, eq_method, eq_shape); +fn eq_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. 
}) = method_args { + if args.len() == 1 { + let (value_opt, arg_errors) = args[0].apply_to_path(data, vars, input_path); + let matches = if let Some(value) = value_opt { + data == &value + } else { + false + }; + return tail + .apply_to_path(&JSON::Bool(matches), vars, input_path) + .prepend_errors(arg_errors); + } + } + ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires exactly one argument", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ) +} +fn eq_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + _input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + Shape::bool() +} + +// Like ->match, but expects the first element of each pair to evaluate to a +// boolean, returning the second element of the first pair whose first element +// is true. This makes providing a final catch-all case easy, since the last +// pair can be [true, ]. +impl_arrow_method!(MatchIfMethod, match_if_method, match_if_shape); +fn match_if_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + let mut errors = Vec::new(); + + if let Some(MethodArgs { args, .. 
}) = method_args { + for pair in args { + if let LitExpr::Array(pair) = pair.as_ref() { + if pair.len() == 2 { + let (condition_opt, condition_errors) = + pair[0].apply_to_path(data, vars, input_path); + errors.extend(condition_errors); + + if let Some(JSON::Bool(true)) = condition_opt { + return pair[1] + .apply_to_path(data, vars, input_path) + .and_then_collecting_errors(|value| { + tail.apply_to_path(value, vars, input_path) + }) + .prepend_errors(errors); + }; + } + } + } + } + ( + None, + vec_push( + errors, + ApplyToError::new( + format!( + "Method ->{} did not match any [condition, value] pair", + method_name.as_ref(), + ), + input_path.to_vec(), + merge_ranges( + method_name.range(), + method_args.and_then(|args| args.range()), + ), + ), + ), + ) +} +fn match_if_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + use super::super::methods::public::match_shape; + // Since match_shape does not inspect the candidate expressions, we can + // reuse it for ->matchIf, where the only functional difference is that the + // candidate expressions are expected to be boolean. + match_shape( + method_name, + method_args, + input_shape, + dollar_shape, + named_var_shapes, + ) +} + +pub(super) fn arithmetic_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + op: impl Fn(&Number, &Number) -> Option, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. 
}) = method_args { + if let JSON::Number(result) = data { + let mut result = result.clone(); + let mut errors = Vec::new(); + for arg in args { + let (value_opt, arg_errors) = arg.apply_to_path(data, vars, input_path); + errors.extend(arg_errors); + if let Some(JSON::Number(n)) = value_opt { + if let Some(new_result) = op(&result, &n) { + result = new_result; + } else { + return ( + None, + vec_push( + errors, + ApplyToError::new( + format!( + "Method ->{} failed on argument {}", + method_name.as_ref(), + n + ), + input_path.to_vec(), + arg.range(), + ), + ), + ); + } + } else { + return ( + None, + vec_push( + errors, + ApplyToError::new( + format!( + "Method ->{} requires numeric arguments", + method_name.as_ref() + ), + input_path.to_vec(), + arg.range(), + ), + ), + ); + } + } + (Some(JSON::Number(result)), errors) + } else { + ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires numeric arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ) + } + } else { + ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires at least one argument", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ) + } +} + +macro_rules! infix_math_op { + ($name:ident, $op:tt) => { + fn $name(a: &Number, b: &Number) -> Option { + if a.is_f64() || b.is_f64() { + Number::from_f64(a.as_f64().unwrap() $op b.as_f64().unwrap()) + } else if let (Some(a_i64), Some(b_i64)) = (a.as_i64(), b.as_i64()) { + Some(Number::from(a_i64 $op b_i64)) + } else { + None + } + } + }; +} +infix_math_op!(add_op, +); +infix_math_op!(sub_op, -); +infix_math_op!(mul_op, *); +infix_math_op!(div_op, /); +infix_math_op!(rem_op, %); + +fn math_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + _input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + Shape::error("TODO: math_shape") +} + +macro_rules! 
infix_math_method { + ($struct_name:ident, $fn_name:ident, $op:ident) => { + impl_arrow_method!($struct_name, $fn_name, math_shape); + fn $fn_name( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, + ) -> (Option, Vec) { + arithmetic_method(method_name, method_args, $op, data, vars, input_path) + .and_then_collecting_errors(|result| tail.apply_to_path(&result, vars, input_path)) + } + }; +} +infix_math_method!(AddMethod, add_method, add_op); +infix_math_method!(SubMethod, sub_method, sub_op); +infix_math_method!(MulMethod, mul_method, mul_op); +infix_math_method!(DivMethod, div_method, div_op); +infix_math_method!(ModMethod, mod_method, rem_op); + +impl_arrow_method!(HasMethod, has_method, has_shape); +fn has_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. 
}) = method_args { + match args.first() { + Some(arg) => match arg.apply_to_path(data, vars, input_path) { + (Some(ref json_index @ JSON::Number(ref n)), arg_errors) => { + match (data, n.as_i64()) { + (JSON::Array(array), Some(index)) => { + let ilen = array.len() as i64; + // Negative indices count from the end of the array + let index = if index < 0 { ilen + index } else { index }; + tail.apply_to_path( + &JSON::Bool(index >= 0 && index < ilen), + vars, + &input_path.append(json_index.clone()), + ) + .prepend_errors(arg_errors) + } + + (json_key @ JSON::String(s), Some(index)) => { + let ilen = s.as_str().len() as i64; + // Negative indices count from the end of the array + let index = if index < 0 { ilen + index } else { index }; + tail.apply_to_path( + &JSON::Bool(index >= 0 && index < ilen), + vars, + &input_path.append(json_key.clone()), + ) + .prepend_errors(arg_errors) + } + + _ => tail + .apply_to_path( + &JSON::Bool(false), + vars, + &input_path.append(json_index.clone()), + ) + .prepend_errors(arg_errors), + } + } + + (Some(ref json_key @ JSON::String(ref s)), arg_errors) => match data { + JSON::Object(map) => tail + .apply_to_path( + &JSON::Bool(map.contains_key(s.as_str())), + vars, + &input_path.append(json_key.clone()), + ) + .prepend_errors(arg_errors), + + _ => tail + .apply_to_path( + &JSON::Bool(false), + vars, + &input_path.append(json_key.clone()), + ) + .prepend_errors(arg_errors), + }, + + (Some(value), arg_errors) => tail + .apply_to_path(&JSON::Bool(false), vars, &input_path.append(value.clone())) + .prepend_errors(arg_errors), + + (None, arg_errors) => tail + .apply_to_path(&JSON::Bool(false), vars, input_path) + .prepend_errors(arg_errors), + }, + None => ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires an argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ), + } + } else { + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires an argument", method_name.as_ref()), + 
input_path.to_vec(), + method_name.range(), + )], + ) + } +} +fn has_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + _input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + // TODO We could be more clever here (sometimes) based on the input_shape + // and argument shapes. + Shape::bool() +} + +impl_arrow_method!(GetMethod, get_method, get_shape); +fn get_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. }) = method_args { + if let Some(index_literal) = args.first() { + match index_literal.apply_to_path(data, vars, input_path) { + (Some(JSON::Number(n)), index_errors) => match (data, n.as_i64()) { + (JSON::Array(array), Some(i)) => { + // Negative indices count from the end of the array + if let Some(element) = array.get(if i < 0 { + (array.len() as i64 + i) as usize + } else { + i as usize + }) { + tail.apply_to_path(element, vars, input_path) + .prepend_errors(index_errors) + } else { + ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{}({}) index out of bounds", + method_name.as_ref(), + i, + ), + input_path.to_vec(), + index_literal.range(), + ), + ), + ) + } + } + + (JSON::String(s), Some(i)) => { + let s_str = s.as_str(); + let ilen = s_str.len() as i64; + // Negative indices count from the end of the array + let index = if i < 0 { ilen + i } else { i }; + if index >= 0 && index < ilen { + let uindex = index as usize; + let single_char_string = s_str[uindex..uindex + 1].to_string(); + tail.apply_to_path( + &JSON::String(single_char_string.into()), + vars, + input_path, + ) + .prepend_errors(index_errors) + } else { + ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{}({}) index out of bounds", + method_name.as_ref(), + i, + ), + input_path.to_vec(), + 
index_literal.range(), + ), + ), + ) + } + } + + (_, None) => ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{} requires an integer index", + method_name.as_ref() + ), + input_path.to_vec(), + index_literal.range(), + ), + ), + ), + _ => ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{} requires an array or string input, not {}", + method_name.as_ref(), + json_type_name(data), + ), + input_path.to_vec(), + method_name.range(), + ), + ), + ), + }, + (Some(ref key @ JSON::String(ref s)), index_errors) => match data { + JSON::Object(map) => { + if let Some(value) = map.get(s.as_str()) { + tail.apply_to_path(value, vars, input_path) + .prepend_errors(index_errors) + } else { + ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{}({}) object key not found", + method_name.as_ref(), + key + ), + input_path.to_vec(), + index_literal.range(), + ), + ), + ) + } + } + _ => ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{}({}) requires an object input", + method_name.as_ref(), + key + ), + input_path.to_vec(), + merge_ranges( + method_name.range(), + method_args.and_then(|args| args.range()), + ), + ), + ), + ), + }, + (Some(value), index_errors) => ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{}({}) requires an integer or string argument", + method_name.as_ref(), + value, + ), + input_path.to_vec(), + index_literal.range(), + ), + ), + ), + (None, index_errors) => ( + None, + vec_push( + index_errors, + ApplyToError::new( + format!( + "Method ->{} received undefined argument", + method_name.as_ref() + ), + input_path.to_vec(), + index_literal.range(), + ), + ), + ), + } + } else { + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires an argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) + } + } else { + ( + None, + vec![ApplyToError::new( + 
format!("Method ->{} requires an argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) + } +} +fn get_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if let Some(MethodArgs { args, .. }) = method_args { + if let Some(index_literal) = args.first() { + let index_shape = index_literal.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ); + return match index_shape.case() { + ShapeCase::String(value_opt) => match input_shape.case() { + ShapeCase::Object { fields, rest } => { + if let Some(literal_name) = value_opt { + if let Some(shape) = fields.get(literal_name.as_str()) { + return shape.clone(); + } + } + let mut value_shapes = fields.values().cloned().collect::>(); + if !rest.is_none() { + value_shapes.push(rest.clone()); + } + value_shapes.push(Shape::none()); + Shape::one(value_shapes) + } + ShapeCase::Array { .. } => Shape::error_with_range( + format!( + "Method ->{} applied to array requires integer index, not string", + method_name.as_ref() + ) + .as_str(), + index_literal.range(), + ), + ShapeCase::String(_) => Shape::error_with_range( + format!( + "Method ->{} applied to string requires integer index, not string", + method_name.as_ref() + ) + .as_str(), + index_literal.range(), + ), + _ => Shape::error("Method ->get requires an object, array, or string input"), + }, + + ShapeCase::Int(value_opt) => { + match input_shape.case() { + ShapeCase::Array { prefix, tail } => { + if let Some(index) = value_opt { + if let Some(item) = prefix.get(*index as usize) { + return item.clone(); + } + } + // If tail.is_none(), this will simplify to Shape::none(). 
+ Shape::one([tail.clone(), Shape::none()]) + } + + ShapeCase::String(Some(s)) => { + if let Some(index) = value_opt { + let index = *index as usize; + if index < s.len() { + Shape::string_value(&s[index..index + 1]) + } else { + Shape::none() + } + } else { + Shape::one([Shape::string(), Shape::none()]) + } + } + ShapeCase::String(None) => Shape::one([Shape::string(), Shape::none()]), + + ShapeCase::Object { .. } => Shape::error_with_range( + format!( + "Method ->{} applied to object requires string index, not integer", + method_name.as_ref() + ) + .as_str(), + index_literal.range(), + ), + + _ => { + Shape::error("Method ->get requires an object, array, or string input") + } + } + } + + _ => Shape::error_with_range( + format!( + "Method ->{} requires an integer or string argument", + method_name.as_ref() + ) + .as_str(), + index_literal.range(), + ), + }; + } + } + + Shape::error_with_range( + format!("Method ->{} requires an argument", method_name.as_ref()).as_str(), + method_name.range(), + ) +} + +impl_arrow_method!(KeysMethod, keys_method, keys_shape); +fn keys_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Object(map) => { + let keys = map.keys().map(|key| JSON::String(key.clone())).collect(); + tail.apply_to_path(&JSON::Array(keys), vars, input_path) + } + _ => ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires an object input, not {}", + method_name.as_ref(), + json_type_name(data), + ), + input_path.to_vec(), + method_name.range(), + )], + ), + } +} +fn keys_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + input_shape: Shape, + 
_dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + match input_shape.case() { + ShapeCase::Object { fields, rest, .. } => { + // Any statically known field names become string literal shapes in + // the resulting keys array. + let keys_vec = fields + .keys() + .map(|key| Shape::string_value(key.as_str())) + .collect::>(); + + Shape::array( + keys_vec, + // Since we're collecting key shapes, we want String for the + // rest shape when it's not None. + if rest.is_none() { + Shape::none() + } else { + Shape::string() + }, + ) + } + _ => Shape::error("Method ->keys requires an object input"), + } +} + +impl_arrow_method!(ValuesMethod, values_method, values_shape); +fn values_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Object(map) => { + let values = map.values().cloned().collect(); + tail.apply_to_path(&JSON::Array(values), vars, input_path) + } + _ => ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires an object input, not {}", + method_name.as_ref(), + json_type_name(data), + ), + input_path.to_vec(), + method_name.range(), + )], + ), + } +} +fn values_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + match input_shape.case() { + ShapeCase::Object { fields, rest, .. 
} => { + Shape::array(fields.values().cloned(), rest.clone()) + } + _ => Shape::error("Method ->values requires an object input"), + } +} + +impl_arrow_method!(NotMethod, not_method, not_shape); +fn not_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ) + } else { + tail.apply_to_path(&JSON::Bool(!is_truthy(data)), vars, input_path) + } +} +fn not_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + match input_shape.case() { + ShapeCase::Bool(Some(value)) => Shape::bool_value(!*value), + ShapeCase::Int(Some(value)) => Shape::bool_value(*value == 0), + ShapeCase::String(Some(value)) => Shape::bool_value(value.is_empty()), + ShapeCase::Null => Shape::bool_value(true), + ShapeCase::Array { .. } | ShapeCase::Object { .. } => Shape::bool_value(false), + _ => Shape::bool(), + } +} + +fn is_truthy(data: &JSON) -> bool { + match data { + JSON::Bool(b) => *b, + JSON::Number(n) => n.as_f64().map_or(false, |n| n != 0.0), + JSON::Null => false, + JSON::String(s) => !s.as_str().is_empty(), + JSON::Object(_) | JSON::Array(_) => true, + } +} + +impl_arrow_method!(OrMethod, or_method, or_shape); +fn or_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. 
}) = method_args { + let mut result = is_truthy(data); + let mut errors = Vec::new(); + + for arg in args { + if result { + break; + } + let (value_opt, arg_errors) = arg.apply_to_path(data, vars, input_path); + errors.extend(arg_errors); + result = value_opt.map(|value| is_truthy(&value)).unwrap_or(false); + } + + tail.apply_to_path(&JSON::Bool(result), vars, input_path) + .prepend_errors(errors) + } else { + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires arguments", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) + } +} +fn or_shape( + _method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + match input_shape.case() { + ShapeCase::Bool(Some(true)) => { + return Shape::bool_value(true); + } + ShapeCase::Int(Some(value)) if *value != 0 => { + return Shape::bool_value(true); + } + ShapeCase::String(Some(value)) if !value.is_empty() => { + return Shape::bool_value(true); + } + ShapeCase::Array { .. } | ShapeCase::Object { .. } => { + return Shape::bool_value(true); + } + _ => {} + }; + + if let Some(MethodArgs { args, .. }) = method_args { + for arg in args { + let arg_shape = arg.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ); + match arg_shape.case() { + ShapeCase::Bool(Some(true)) => { + return Shape::bool_value(true); + } + ShapeCase::Int(Some(value)) if *value != 0 => { + return Shape::bool_value(true); + } + ShapeCase::String(Some(value)) if !value.is_empty() => { + return Shape::bool_value(true); + } + ShapeCase::Array { .. } | ShapeCase::Object { .. 
} => { + return Shape::bool_value(true); + } + _ => {} + } + } + } + + Shape::bool() +} + +impl_arrow_method!(AndMethod, and_method, and_shape); +fn and_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. }) = method_args { + let mut result = is_truthy(data); + let mut errors = Vec::new(); + + for arg in args { + if !result { + break; + } + let (value_opt, arg_errors) = arg.apply_to_path(data, vars, input_path); + errors.extend(arg_errors); + result = value_opt.map(|value| is_truthy(&value)).unwrap_or(false); + } + + tail.apply_to_path(&JSON::Bool(result), vars, input_path) + .prepend_errors(errors) + } else { + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires arguments", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) + } +} +fn and_shape( + _method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + match input_shape.case() { + ShapeCase::Bool(Some(false)) => { + return Shape::bool_value(false); + } + ShapeCase::Int(Some(value)) if *value == 0 => { + return Shape::bool_value(false); + } + ShapeCase::String(Some(value)) if value.is_empty() => { + return Shape::bool_value(false); + } + ShapeCase::Null => { + return Shape::bool_value(false); + } + _ => {} + }; + + if let Some(MethodArgs { args, .. 
}) = method_args { + for arg in args { + let arg_shape = arg.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ); + match arg_shape.case() { + ShapeCase::Bool(Some(false)) => { + return Shape::bool_value(false); + } + ShapeCase::Int(Some(value)) if *value == 0 => { + return Shape::bool_value(false); + } + ShapeCase::String(Some(value)) if value.is_empty() => { + return Shape::bool_value(false); + } + ShapeCase::Null => { + return Shape::bool_value(false); + } + _ => {} + } + } + } + + Shape::bool() +} diff --git a/apollo-federation/src/sources/connect/json_selection/methods/public.rs b/apollo-federation/src/sources/connect/json_selection/methods/public.rs new file mode 100644 index 0000000000..37fc28bd0a --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/methods/public.rs @@ -0,0 +1,828 @@ +use apollo_compiler::collections::IndexMap; +use serde_json_bytes::ByteString; +use serde_json_bytes::Map as JSONMap; +use serde_json_bytes::Value as JSON; +use shape::NamedShapePathKey; +use shape::Shape; +use shape::ShapeCase; + +use crate::impl_arrow_method; +use crate::sources::connect::json_selection::apply_to::ApplyToResultMethods; +use crate::sources::connect::json_selection::helpers::json_type_name; +use crate::sources::connect::json_selection::helpers::vec_push; +use crate::sources::connect::json_selection::immutable::InputPath; +use crate::sources::connect::json_selection::known_var::KnownVariable; +use crate::sources::connect::json_selection::lit_expr::LitExpr; +use crate::sources::connect::json_selection::location::merge_ranges; +use crate::sources::connect::json_selection::location::Ranged; +use crate::sources::connect::json_selection::location::WithRange; +use crate::sources::connect::json_selection::ApplyToError; +use crate::sources::connect::json_selection::ApplyToInternal; +use crate::sources::connect::json_selection::MethodArgs; +use crate::sources::connect::json_selection::PathList; +use 
crate::sources::connect::json_selection::VarsWithPathsMap; + +impl_arrow_method!(EchoMethod, echo_method, echo_shape); +fn echo_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(MethodArgs { args, .. }) = method_args { + if let Some(arg) = args.first() { + return arg + .apply_to_path(data, vars, input_path) + .and_then_collecting_errors(|value| tail.apply_to_path(value, vars, input_path)); + } + } + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires one argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) +} +fn echo_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if let Some(first_arg) = method_args.and_then(|args| args.args.first()) { + return first_arg.compute_output_shape(input_shape, dollar_shape, named_var_shapes); + } + Shape::error_with_range( + format!("Method ->{} requires one argument", method_name.as_ref()), + method_name.range(), + ) +} + +impl_arrow_method!(MapMethod, map_method, map_shape); +fn map_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if let Some(args) = method_args { + if let Some(first_arg) = args.args.first() { + if let JSON::Array(array) = data { + let mut output = Vec::with_capacity(array.len()); + let mut errors = Vec::new(); + + for (i, element) in array.iter().enumerate() { + let input_path = input_path.append(JSON::Number(i.into())); + let (applied_opt, arg_errors) = + first_arg.apply_to_path(element, vars, &input_path); + errors.extend(arg_errors); + if let Some(applied) = applied_opt { + let (value_opt, apply_errors) = + tail.apply_to_path(&applied, vars, &input_path); + 
errors.extend(apply_errors); + if let Some(value) = value_opt { + output.push(value); + continue; + } + } + output.push(JSON::Null); + } + + return (Some(JSON::Array(output)), errors); + } else { + return first_arg.apply_to_path(data, vars, input_path); + } + } else { + return ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires one argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ); + } + } + ( + None, + vec![ApplyToError::new( + format!("Method ->{} requires one argument", method_name.as_ref()), + input_path.to_vec(), + method_name.range(), + )], + ) +} +fn map_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if let Some(first_arg) = method_args.and_then(|args| args.args.first()) { + match input_shape.case() { + ShapeCase::Array { prefix, tail } => { + let new_prefix = prefix + .iter() + .map(|shape| { + first_arg.compute_output_shape( + shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ) + }) + .collect::>(); + let new_tail = first_arg.compute_output_shape( + tail.clone(), + dollar_shape.clone(), + named_var_shapes, + ); + Shape::array(new_prefix, new_tail) + } + ShapeCase::Name(_name, _subpath) => { + // We don't have a way to tell if this is an array, where map is applied to each + // element, or a different value where map is applied to just one. So for now we + // erase the type (until we add more sophisticated resolution in the future). 
+ Shape::unknown() + } + _ => first_arg.compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ), + } + } else { + Shape::error_with_range( + format!("Method ->{} requires one argument", method_name.as_ref()), + method_name.range(), + ) + } +} + +impl_arrow_method!(MatchMethod, match_method, match_shape); +fn match_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + // Takes any number of pairs [key, value], and returns value for the first + // key that equals the data. If none of the pairs match, returns None. + // Typically, the final pair will use @ as its key to ensure some default + // value is returned. + let mut errors = Vec::new(); + + if let Some(MethodArgs { args, .. }) = method_args { + for pair in args { + if let LitExpr::Array(pair) = pair.as_ref() { + if pair.len() == 2 { + let (candidate_opt, candidate_errors) = + pair[0].apply_to_path(data, vars, input_path); + errors.extend(candidate_errors); + + if let Some(candidate) = candidate_opt { + if candidate == *data { + return pair[1] + .apply_to_path(data, vars, input_path) + .and_then_collecting_errors(|value| { + tail.apply_to_path(value, vars, input_path) + }) + .prepend_errors(errors); + } + }; + } + } + } + } + + ( + None, + vec_push( + errors, + ApplyToError::new( + format!( + "Method ->{} did not match any [candidate, value] pair", + method_name.as_ref(), + ), + input_path.to_vec(), + merge_ranges( + method_name.range(), + method_args.and_then(|args| args.range()), + ), + ), + ), + ) +} +pub(super) fn match_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + dollar_shape: Shape, + named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if let Some(MethodArgs { args, .. 
}) = method_args { + let mut result_union = Vec::new(); + let mut has_infallible_case = false; + + for pair in args { + if let LitExpr::Array(pair) = pair.as_ref() { + if pair.len() == 2 { + if let LitExpr::Path(path) = pair[0].as_ref() { + if let PathList::Var(known_var, _tail) = path.path.as_ref() { + if known_var.as_ref() == &KnownVariable::AtSign { + has_infallible_case = true; + } + } + }; + + let value_shape = pair[1].compute_output_shape( + input_shape.clone(), + dollar_shape.clone(), + named_var_shapes, + ); + result_union.push(value_shape); + } + } + } + + if !has_infallible_case { + result_union.push(Shape::none()); + } + + if result_union.is_empty() { + Shape::error_with_range( + format!( + "Method ->{} requires at least one [candidate, value] pair", + method_name.as_ref(), + ), + merge_ranges( + method_name.range(), + method_args.and_then(|args| args.range()), + ), + ) + } else { + Shape::one(result_union) + } + } else { + Shape::error_with_range( + format!( + "Method ->{} requires at least one [candidate, value] pair", + method_name.as_ref(), + ), + method_name.range(), + ) + } +} + +impl_arrow_method!(FirstMethod, first_method, first_shape); +fn first_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Array(array) => { + if let Some(first) = array.first() { + tail.apply_to_path(first, vars, input_path) + } else { + (None, vec![]) + } + } + + JSON::String(s) => { + if let Some(first) = s.as_str().chars().next() { + tail.apply_to_path(&JSON::String(first.to_string().into()), vars, input_path) + } else { + (None, vec![]) + } + } + + _ => tail.apply_to_path(data, vars, input_path), + } +} 
+fn first_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if method_args.is_some() { + return Shape::error_with_range( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + method_name.range(), + ); + } + + match input_shape.case() { + ShapeCase::String(Some(value)) => Shape::string_value(&value[0..1]), + ShapeCase::String(None) => Shape::string(), + ShapeCase::Array { prefix, tail } => { + if let Some(first) = prefix.first() { + first.clone() + } else if tail.is_none() { + Shape::none() + } else { + Shape::one([tail.clone(), Shape::none()]) + } + } + ShapeCase::Name(_, _) => input_shape.child(&NamedShapePathKey::Index(0)), + // When there is no obvious first element, ->first gives us the input + // value itself, which has input_shape. + _ => input_shape.clone(), + } +} + +impl_arrow_method!(LastMethod, last_method, last_shape); +fn last_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Array(array) => { + if let Some(last) = array.last() { + tail.apply_to_path(last, vars, input_path) + } else { + (None, vec![]) + } + } + + JSON::String(s) => { + if let Some(last) = s.as_str().chars().last() { + tail.apply_to_path(&JSON::String(last.to_string().into()), vars, input_path) + } else { + (None, vec![]) + } + } + + _ => tail.apply_to_path(data, vars, input_path), + } +} +fn last_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> 
Shape { + if method_args.is_some() { + return Shape::error_with_range( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + method_name.range(), + ); + } + + match input_shape.case() { + ShapeCase::String(Some(value)) => { + if let Some(last_char) = value.chars().last() { + Shape::string_value(last_char.to_string().as_str()) + } else { + Shape::none() + } + } + ShapeCase::String(None) => Shape::one([Shape::string(), Shape::none()]), + ShapeCase::Array { prefix, tail } => { + if tail.is_none() { + if let Some(last) = prefix.last() { + last.clone() + } else { + Shape::none() + } + } else if let Some(last) = prefix.last() { + Shape::one([last.clone(), tail.clone(), Shape::none()]) + } else { + Shape::one([tail.clone(), Shape::none()]) + } + } + ShapeCase::Name(_, _) => input_shape.any_item(), + // When there is no obvious last element, ->last gives us the input + // value itself, which has input_shape. + _ => input_shape.clone(), + } +} + +impl_arrow_method!(SliceMethod, slice_method, slice_shape); +fn slice_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + let length = if let JSON::Array(array) = data { + array.len() as i64 + } else if let JSON::String(s) = data { + s.as_str().len() as i64 + } else { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires an array or string input", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + }; + + if let Some(MethodArgs { args, .. 
}) = method_args { + let mut errors = Vec::new(); + + let start = args + .first() + .and_then(|arg| { + let (value_opt, apply_errors) = arg.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + value_opt + }) + .and_then(|n| n.as_i64()) + .unwrap_or(0) + .max(0) + .min(length) as usize; + + let end = args + .get(1) + .and_then(|arg| { + let (value_opt, apply_errors) = arg.apply_to_path(data, vars, input_path); + errors.extend(apply_errors); + value_opt + }) + .and_then(|n| n.as_i64()) + .unwrap_or(length) + .max(0) + .min(length) as usize; + + let array = match data { + JSON::Array(array) => { + if end - start > 0 { + JSON::Array( + array + .iter() + .skip(start) + .take(end - start) + .cloned() + .collect(), + ) + } else { + JSON::Array(vec![]) + } + } + + JSON::String(s) => { + if end - start > 0 { + JSON::String(s.as_str()[start..end].to_string().into()) + } else { + JSON::String("".to_string().into()) + } + } + + _ => unreachable!(), + }; + + tail.apply_to_path(&array, vars, input_path) + .prepend_errors(errors) + } else { + // TODO Should calling ->slice or ->slice() without arguments be an + // error? In JavaScript, array->slice() copies the array, but that's not + // so useful in an immutable value-typed language like JSONSelection. + (Some(data.clone()), vec![]) + } +} +fn slice_shape( + method_name: &WithRange, + _method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + // There are more clever shapes we could compute here (when start and end + // are statically known integers and input_shape is an array or string with + // statically known prefix elements, for example) but for now we play it + // safe (and honest) by returning a new variable-length array whose element + // shape is a union of the original element (prefix and tail) shapes. 
+ match input_shape.case() { + ShapeCase::Array { prefix, tail } => { + let mut one_shapes = prefix.clone(); + if !tail.is_none() { + one_shapes.push(tail.clone()); + } + Shape::array([], Shape::one(one_shapes)) + } + ShapeCase::String(_) => Shape::string(), + ShapeCase::Name(_, _) => input_shape, // TODO: add a way to validate inputs after name resolution + _ => Shape::error_with_range( + format!( + "Method ->{} requires an array or string input", + method_name.as_ref() + ), + method_name.range(), + ), + } +} + +impl_arrow_method!(SizeMethod, size_method, size_shape); +fn size_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Array(array) => { + let size = array.len() as i64; + tail.apply_to_path(&JSON::Number(size.into()), vars, input_path) + } + JSON::String(s) => { + let size = s.as_str().len() as i64; + tail.apply_to_path(&JSON::Number(size.into()), vars, input_path) + } + // Though we can't ask for ->first or ->last or ->at(n) on an object, we + // can safely return how many properties the object has for ->size. 
+ JSON::Object(map) => { + let size = map.len() as i64; + tail.apply_to_path(&JSON::Number(size.into()), vars, input_path) + } + _ => ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires an array, string, or object input, not {}", + method_name.as_ref(), + json_type_name(data), + ), + input_path.to_vec(), + method_name.range(), + )], + ), + } +} +fn size_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if method_args.is_some() { + return Shape::error_with_range( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + method_name.range(), + ); + } + + match input_shape.case() { + ShapeCase::String(Some(value)) => Shape::int_value(value.len() as i64), + ShapeCase::String(None) => Shape::int(), + ShapeCase::Name(_, _) => Shape::int(), // TODO: catch errors after name resolution + ShapeCase::Array { prefix, tail } => { + if tail.is_none() { + Shape::int_value(prefix.len() as i64) + } else { + Shape::int() + } + } + ShapeCase::Object { fields, rest, .. 
} => { + if rest.is_none() { + Shape::int_value(fields.len() as i64) + } else { + Shape::int() + } + } + _ => Shape::error_with_range( + format!( + "Method ->{} requires an array, string, or object input", + method_name.as_ref() + ), + method_name.range(), + ), + } +} + +impl_arrow_method!(EntriesMethod, entries_method, entries_shape); +fn entries_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + vars: &VarsWithPathsMap, + input_path: &InputPath, + tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match data { + JSON::Object(map) => { + let entries = map + .iter() + .map(|(key, value)| { + let mut key_value_pair = JSONMap::new(); + key_value_pair.insert(ByteString::from("key"), JSON::String(key.clone())); + key_value_pair.insert(ByteString::from("value"), value.clone()); + JSON::Object(key_value_pair) + }) + .collect(); + tail.apply_to_path(&JSON::Array(entries), vars, input_path) + } + _ => ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} requires an object input, not {}", + method_name.as_ref(), + json_type_name(data), + ), + input_path.to_vec(), + method_name.range(), + )], + ), + } +} +fn entries_shape( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + input_shape: Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + if method_args.is_some() { + return Shape::error_with_range( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + method_name.range(), + ); + } + + match input_shape.case() { + ShapeCase::Object { fields, rest, .. 
} => { + let entry_shapes = fields + .iter() + .map(|(key, value)| { + let mut key_value_pair = Shape::empty_map(); + key_value_pair.insert("key".to_string(), Shape::string_value(key.as_str())); + key_value_pair.insert("value".to_string(), value.clone()); + Shape::object(key_value_pair, Shape::none()) + }) + .collect::>(); + + if rest.is_none() { + Shape::array(entry_shapes, rest.clone()) + } else { + let mut tail_key_value_pair = Shape::empty_map(); + tail_key_value_pair.insert("key".to_string(), Shape::string()); + tail_key_value_pair.insert("value".to_string(), rest.clone()); + Shape::array( + entry_shapes, + Shape::object(tail_key_value_pair, Shape::none()), + ) + } + } + ShapeCase::Name(_, _) => { + let mut entries = Shape::empty_map(); + entries.insert("key".to_string(), Shape::string()); + entries.insert("value".to_string(), input_shape.any_field()); + Shape::list(Shape::object(entries, Shape::none())) + } + _ => Shape::error_with_range( + format!("Method ->{} requires an object input", method_name.as_ref()), + method_name.range(), + ), + } +} + +impl_arrow_method!( + JsonStringifyMethod, + json_stringify_method, + json_stringify_shape +); +fn json_stringify_method( + method_name: &WithRange, + method_args: Option<&MethodArgs>, + data: &JSON, + _vars: &VarsWithPathsMap, + input_path: &InputPath, + _tail: &WithRange, +) -> (Option, Vec) { + if method_args.is_some() { + return ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} does not take any arguments", + method_name.as_ref() + ), + input_path.to_vec(), + method_name.range(), + )], + ); + } + + match serde_json::to_string(data) { + Ok(val) => (Some(JSON::String(val.into())), vec![]), + Err(err) => ( + None, + vec![ApplyToError::new( + format!( + "Method ->{} failed to serialize JSON: {}", + method_name.as_ref(), + err + ), + input_path.to_vec(), + method_name.range(), + )], + ), + } +} +fn json_stringify_shape( + _method_name: &WithRange, + _method_args: Option<&MethodArgs>, + _input_shape: 
Shape, + _dollar_shape: Shape, + _named_var_shapes: &IndexMap<&str, Shape>, +) -> Shape { + Shape::string() +} diff --git a/apollo-federation/src/sources/connect/json_selection/methods/snapshots/apollo_federation__sources__connect__json_selection__methods__tests__string_methods.snap b/apollo-federation/src/sources/connect/json_selection/methods/snapshots/apollo_federation__sources__connect__json_selection__methods__tests__string_methods.snap new file mode 100644 index 0000000000..4c7d98fd01 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/methods/snapshots/apollo_federation__sources__connect__json_selection__methods__tests__string_methods.snap @@ -0,0 +1,137 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/methods/tests.rs +expression: selection_with_spaces +--- +Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 1..2, + ), + }, + WithRange { + node: Method( + WithRange { + node: "get", + range: Some( + 6..9, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 12..13, + ), + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: Some( + 17..21, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Number( + Number(-5), + ), + range: Some( + 24..27, + ), + }, + ], + range: Some( + 22..29, + ), + }, + ), + WithRange { + node: Method( + WithRange { + node: "mul", + range: Some( + 33..36, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Number( + Number(2), + ), + range: Some( + 39..40, + ), + }, + ], + range: Some( + 37..42, + ), + }, + ), + WithRange { + node: Empty, + range: Some( + 42..42, + ), + }, + ), + range: Some( + 30..42, + ), + }, + ), + range: Some( + 14..42, + ), + }, + ), + range: Some( + 12..42, + ), + }, + }, + ), + range: Some( + 12..42, + ), + }, + ], + range: Some( + 10..44, + ), + }, + ), + WithRange { + 
node: Empty, + range: Some( + 44..44, + ), + }, + ), + range: Some( + 3..44, + ), + }, + ), + range: Some( + 1..44, + ), + }, + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/methods/tests.rs b/apollo-federation/src/sources/connect/json_selection/methods/tests.rs new file mode 100644 index 0000000000..86dbc3af79 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/methods/tests.rs @@ -0,0 +1,1176 @@ +use insta::assert_debug_snapshot; +use serde_json_bytes::json; + +use super::*; +use crate::selection; + +#[test] +fn test_echo_method() { + assert_eq!( + selection!("$->echo('oyez')").apply_to(&json!(null)), + (Some(json!("oyez")), vec![]), + ); + + assert_eq!( + selection!("$->echo('oyez')").apply_to(&json!([1, 2, 3])), + (Some(json!("oyez")), vec![]), + ); + + assert_eq!( + selection!("$->echo([1, 2, 3]) { id: $ }").apply_to(&json!(null)), + (Some(json!([{ "id": 1 }, { "id": 2 }, { "id": 3 }])), vec![]), + ); + + assert_eq!( + selection!("$->echo([1, 2, 3])->last { id: $ }").apply_to(&json!(null)), + (Some(json!({ "id": 3 })), vec![]), + ); + + assert_eq!( + selection!("$->echo([1.1, 0.2, -3.3]) { id: $ }").apply_to(&json!(null)), + ( + Some(json!([{ "id": 1.1 }, { "id": 0.2 }, { "id": -3.3 }])), + vec![] + ), + ); + + assert_eq!( + selection!("$.nested.value->echo(['before', @, 'after'])").apply_to(&json!({ + "nested": { + "value": 123, + }, + })), + (Some(json!(["before", 123, "after"])), vec![]), + ); + + assert_eq!( + selection!("$.nested.value->echo(['before', $, 'after'])").apply_to(&json!({ + "nested": { + "value": 123, + }, + })), + ( + Some(json!(["before", { + "nested": { + "value": 123, + }, + }, "after"])), + vec![] + ), + ); + + assert_eq!( + selection!("data->echo(@.results->last)").apply_to(&json!({ + "data": { + "results": [1, 2, 3], + }, + })), + (Some(json!(3)), vec![]), + ); + + assert_eq!( + selection!("results->echo(@->first)").apply_to(&json!({ + "results": [ + [1, 2, 3], + "ignored", + ], + })), + 
(Some(json!([1, 2, 3])), vec![]), + ); + + assert_eq!( + selection!("results->echo(@->first)->last").apply_to(&json!({ + "results": [ + [1, 2, 3], + "ignored", + ], + })), + (Some(json!(3)), vec![]), + ); + + { + let nested_value_data = json!({ + "nested": { + "value": 123, + }, + }); + + let expected = (Some(json!({ "wrapped": 123 })), vec![]); + + let check = |selection: &str| { + assert_eq!(selection!(selection).apply_to(&nested_value_data), expected,); + }; + + check("nested.value->echo({ wrapped: @ })"); + check("nested.value->echo({ wrapped: @,})"); + check("nested.value->echo({ wrapped: @,},)"); + check("nested.value->echo({ wrapped: @},)"); + check("nested.value->echo({ wrapped: @ , } , )"); + } + + // Turn a list of { name, hobby } objects into a single { names: [...], + // hobbies: [...] } object. + assert_eq!( + selection!( + r#" + people->echo({ + names: @.name, + hobbies: @.hobby, + }) + "# + ) + .apply_to(&json!({ + "people": [ + { "name": "Alice", "hobby": "reading" }, + { "name": "Bob", "hobby": "fishing" }, + { "hobby": "painting", "name": "Charlie" }, + ], + })), + ( + Some(json!({ + "names": ["Alice", "Bob", "Charlie"], + "hobbies": ["reading", "fishing", "painting"], + })), + vec![], + ), + ); +} + +#[test] +fn test_typeof_method() { + fn check(selection: &str, data: &JSON, expected_type: &str) { + assert_eq!( + selection!(selection).apply_to(data), + (Some(json!(expected_type)), vec![]), + ); + } + + check("$->typeof", &json!(null), "null"); + check("$->typeof", &json!(true), "boolean"); + check("@->typeof", &json!(false), "boolean"); + check("$->typeof", &json!(123), "number"); + check("$->typeof", &json!(123.45), "number"); + check("$->typeof", &json!("hello"), "string"); + check("$->typeof", &json!([1, 2, 3]), "array"); + check("$->typeof", &json!({ "key": "value" }), "object"); +} + +#[test] +fn test_map_method() { + assert_eq!( + selection!("$->map(@->add(10))").apply_to(&json!([1, 2, 3])), + (Some(json!(vec![11, 12, 13])), vec![]), + ); + 
+ assert_eq!( + selection!("messages->map(@.role)").apply_to(&json!({ + "messages": [ + { "role": "admin" }, + { "role": "user" }, + { "role": "guest" }, + ], + })), + (Some(json!(["admin", "user", "guest"])), vec![]), + ); + + assert_eq!( + selection!("messages->map(@.roles)").apply_to(&json!({ + "messages": [ + { "roles": ["admin"] }, + { "roles": ["user", "guest"] }, + ], + })), + (Some(json!([["admin"], ["user", "guest"]])), vec![]), + ); + + assert_eq!( + selection!("values->map(@->typeof)").apply_to(&json!({ + "values": [1, 2.5, "hello", true, null, [], {}], + })), + ( + Some(json!([ + "number", "number", "string", "boolean", "null", "array", "object" + ])), + vec![], + ), + ); + + assert_eq!( + selection!("singleValue->map(@->mul(10))").apply_to(&json!({ + "singleValue": 123, + })), + (Some(json!(1230)), vec![]), + ); +} + +#[test] +fn test_missing_method() { + assert_eq!( + selection!("nested.path->bogus").apply_to(&json!({ + "nested": { + "path": 123, + }, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->bogus not found", + "path": ["nested", "path", "->bogus"], + "range": [13, 18], + }))], + ), + ); +} + +#[test] +fn test_match_methods() { + assert_eq!( + selection!( + r#" + name + __typename: kind->match( + ['dog', 'Canine'], + ['cat', 'Feline'] + ) + "# + ) + .apply_to(&json!({ + "kind": "cat", + "name": "Whiskers", + })), + ( + Some(json!({ + "__typename": "Feline", + "name": "Whiskers", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + name + __typename: kind->match( + ['dog', 'Canine'], + ['cat', 'Feline'], + [@, 'Exotic'], + ) + "# + ) + .apply_to(&json!({ + "kind": "axlotl", + "name": "Gulpy", + })), + ( + Some(json!({ + "__typename": "Exotic", + "name": "Gulpy", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + name + __typename: kind->match( + ['dog', 'Canine'], + ['cat', 'Feline'], + [@, 'Exotic'], + ) + "# + ) + .apply_to(&json!({ + "kind": "dog", + "name": "Laika", + })), + ( + 
Some(json!({ + "__typename": "Canine", + "name": "Laika", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + num: value->matchIf( + [@->typeof->eq('number'), @], + [true, 'not a number'] + ) + "# + ) + .apply_to(&json!({ "value": 123 })), + ( + Some(json!({ + "num": 123, + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + num: value->matchIf( + [@->typeof->eq('number'), @], + [true, 'not a number'] + ) + "# + ) + .apply_to(&json!({ "value": true })), + ( + Some(json!({ + "num": "not a number", + })), + vec![], + ), + ); + + assert_eq!( + selection!( + r#" + result->matchIf( + [@->typeof->eq('boolean'), @], + [true, 'not boolean'] + ) + "# + ) + .apply_to(&json!({ + "result": true, + })), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!( + r#" + result->match_if( + [@->typeof->eq('boolean'), @], + [true, 'not boolean'] + ) + "# + ) + .apply_to(&json!({ + "result": 321, + })), + (Some(json!("not boolean")), vec![]), + ); +} + +#[test] +fn test_arithmetic_methods() { + assert_eq!( + selection!("$->add(1)").apply_to(&json!(2)), + (Some(json!(3)), vec![]), + ); + assert_eq!( + selection!("$->add(1.5)").apply_to(&json!(2)), + (Some(json!(3.5)), vec![]), + ); + assert_eq!( + selection!("$->add(1)").apply_to(&json!(2.5)), + (Some(json!(3.5)), vec![]), + ); + assert_eq!( + selection!("$->add(1, 2, 3, 5, 8)").apply_to(&json!(1)), + (Some(json!(20)), vec![]), + ); + + assert_eq!( + selection!("$->sub(1)").apply_to(&json!(2)), + (Some(json!(1)), vec![]), + ); + assert_eq!( + selection!("$->sub(1.5)").apply_to(&json!(2)), + (Some(json!(0.5)), vec![]), + ); + assert_eq!( + selection!("$->sub(10)").apply_to(&json!(2.5)), + (Some(json!(-7.5)), vec![]), + ); + assert_eq!( + selection!("$->sub(10, 2.5)").apply_to(&json!(2.5)), + (Some(json!(-10.0)), vec![]), + ); + + assert_eq!( + selection!("$->mul(2)").apply_to(&json!(3)), + (Some(json!(6)), vec![]), + ); + assert_eq!( + selection!("$->mul(2.5)").apply_to(&json!(3)), + (Some(json!(7.5)), 
vec![]), + ); + assert_eq!( + selection!("$->mul(2)").apply_to(&json!(3.5)), + (Some(json!(7.0)), vec![]), + ); + assert_eq!( + selection!("$->mul(-2.5)").apply_to(&json!(3.5)), + (Some(json!(-8.75)), vec![]), + ); + assert_eq!( + selection!("$->mul(2, 3, 5, 7)").apply_to(&json!(10)), + (Some(json!(2100)), vec![]), + ); + + assert_eq!( + selection!("$->div(2)").apply_to(&json!(6)), + (Some(json!(3)), vec![]), + ); + assert_eq!( + selection!("$->div(2.5)").apply_to(&json!(7.5)), + (Some(json!(3.0)), vec![]), + ); + assert_eq!( + selection!("$->div(2)").apply_to(&json!(7)), + (Some(json!(3)), vec![]), + ); + assert_eq!( + selection!("$->div(2.5)").apply_to(&json!(7)), + (Some(json!(2.8)), vec![]), + ); + assert_eq!( + selection!("$->div(2, 3, 5, 7)").apply_to(&json!(2100)), + (Some(json!(10)), vec![]), + ); + + assert_eq!( + selection!("$->mod(2)").apply_to(&json!(6)), + (Some(json!(0)), vec![]), + ); + assert_eq!( + selection!("$->mod(2.5)").apply_to(&json!(7.5)), + (Some(json!(0.0)), vec![]), + ); + assert_eq!( + selection!("$->mod(2)").apply_to(&json!(7)), + (Some(json!(1)), vec![]), + ); + assert_eq!( + selection!("$->mod(4)").apply_to(&json!(7)), + (Some(json!(3)), vec![]), + ); + assert_eq!( + selection!("$->mod(2.5)").apply_to(&json!(7)), + (Some(json!(2.0)), vec![]), + ); + assert_eq!( + selection!("$->mod(2, 3, 5, 7)").apply_to(&json!(2100)), + (Some(json!(0)), vec![]), + ); +} + +#[test] +fn test_array_methods() { + assert_eq!( + selection!("$->first").apply_to(&json!([1, 2, 3])), + (Some(json!(1)), vec![]), + ); + + assert_eq!(selection!("$->first").apply_to(&json!([])), (None, vec![]),); + + assert_eq!( + selection!("$->last").apply_to(&json!([1, 2, 3])), + (Some(json!(3)), vec![]), + ); + + assert_eq!(selection!("$->last").apply_to(&json!([])), (None, vec![]),); + + assert_eq!( + selection!("$->get(1)").apply_to(&json!([1, 2, 3])), + (Some(json!(2)), vec![]), + ); + + assert_eq!( + selection!("$->get(-1)").apply_to(&json!([1, 2, 3])), + (Some(json!(3)), 
vec![]), + ); + + assert_eq!( + selection!("numbers->map(@->get(-2))").apply_to(&json!({ + "numbers": [ + [1, 2, 3], + [5, 6], + ], + })), + (Some(json!([2, 5])), vec![]), + ); + + assert_eq!( + selection!("$->get(3)").apply_to(&json!([1, 2, 3])), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(3) index out of bounds", + "path": ["->get"], + "range": [7, 8], + }))] + ), + ); + + assert_eq!( + selection!("$->get(-4)").apply_to(&json!([1, 2, 3])), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(-4) index out of bounds", + "path": ["->get"], + "range": [7, 9], + }))] + ), + ); + + assert_eq!( + selection!("$->get").apply_to(&json!([1, 2, 3])), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get requires an argument", + "path": ["->get"], + "range": [3, 6], + }))] + ), + ); + + assert_eq!( + selection!("$->get('bogus')").apply_to(&json!([1, 2, 3])), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(\"bogus\") requires an object input", + "path": ["->get"], + "range": [3, 15], + }))] + ), + ); + + assert_eq!( + selection!("$->has(1)").apply_to(&json!([1, 2, 3])), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!("$->has(5)").apply_to(&json!([1, 2, 3])), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!([1, 2, 3, 4, 5])), + (Some(json!([2, 3])), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!([1, 2])), + (Some(json!([2])), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!([1])), + (Some(json!([])), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!([])), + (Some(json!([])), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!([])), + (Some(json!(0)), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!([1, 2, 3])), + (Some(json!(3)), vec![]), + ); +} + +#[test] +fn 
test_size_method_errors() { + assert_eq!( + selection!("$->size").apply_to(&json!(null)), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->size requires an array, string, or object input, not null", + "path": ["->size"], + "range": [3, 7], + }))] + ), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!(true)), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->size requires an array, string, or object input, not boolean", + "path": ["->size"], + "range": [3, 7], + }))] + ), + ); + + assert_eq!( + selection!("count->size").apply_to(&json!({ + "count": 123, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->size requires an array, string, or object input, not number", + "path": ["count", "->size"], + "range": [7, 11], + }))] + ), + ); +} + +#[test] +fn test_string_methods() { + assert_eq!( + selection!("$->has(2)").apply_to(&json!("oyez")), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!("$->has(-2)").apply_to(&json!("oyez")), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!("$->has(10)").apply_to(&json!("oyez")), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("$->has(-10)").apply_to(&json!("oyez")), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("$->first").apply_to(&json!("hello")), + (Some(json!("h")), vec![]), + ); + + assert_eq!( + selection!("$->last").apply_to(&json!("hello")), + (Some(json!("o")), vec![]), + ); + + assert_eq!( + selection!("$->get(2)").apply_to(&json!("oyez")), + (Some(json!("e")), vec![]), + ); + + assert_eq!( + selection!("$->get(-1)").apply_to(&json!("oyez")), + (Some(json!("z")), vec![]), + ); + + assert_eq!( + selection!("$->get(3)").apply_to(&json!("oyez")), + (Some(json!("z")), vec![]), + ); + + assert_eq!( + selection!("$->get(4)").apply_to(&json!("oyez")), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(4) index out of bounds", + "path": 
["->get"], + "range": [7, 8], + }))] + ), + ); + + { + let expected = ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(-10) index out of bounds", + "path": ["->get"], + "range": [7, 26], + }))], + ); + assert_eq!( + selection!("$->get($->echo(-5)->mul(2))").apply_to(&json!("oyez")), + expected.clone(), + ); + assert_eq!( + // The extra spaces here should not affect the error.range, as long + // as we don't accidentally capture trailing spaces in the range. + selection!("$->get($->echo(-5)->mul(2) )").apply_to(&json!("oyez")), + expected.clone(), + ); + // All these extra spaces certainly do affect the error.range, but it's + // worth testing that we get all the ranges right, even with so much + // space that could be accidentally captured. + let selection_with_spaces = selection!(" $ -> get ( $ -> echo ( - 5 ) -> mul ( 2 ) ) "); + assert_eq!( + selection_with_spaces.apply_to(&json!("oyez")), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(-10) index out of bounds", + "path": ["->get"], + "range": [12, 42], + }))] + ) + ); + assert_debug_snapshot!(selection_with_spaces); + } + + assert_eq!( + selection!("$->get(true)").apply_to(&json!("input")), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->get(true) requires an integer or string argument", + "path": ["->get"], + "range": [7, 11], + }))] + ), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!("")), + (Some(json!("")), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!("hello")), + (Some(json!("el")), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!("he")), + (Some(json!("e")), vec![]), + ); + + assert_eq!( + selection!("$->slice(1, 3)").apply_to(&json!("h")), + (Some(json!("")), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!("hello")), + (Some(json!(5)), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!("")), + 
(Some(json!(0)), vec![]), + ); +} + +#[test] +fn test_object_methods() { + assert_eq!( + selection!("object->has('a')").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!("object->has('c')").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("object->has(true)").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("object->has(null)").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("object->has('a')->and(object->has('b'))").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(true)), vec![]), + ); + + assert_eq!( + selection!("object->has('b')->and(object->has('c'))").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("object->has('xxx')->typeof").apply_to(&json!({ + "object": { + "a": 123, + "b": 456, + }, + })), + (Some(json!("boolean")), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!({ "a": 1, "b": 2, "c": 3 })), + (Some(json!(3)), vec![]), + ); + + assert_eq!( + selection!("$->size").apply_to(&json!({})), + (Some(json!(0)), vec![]), + ); + + assert_eq!( + selection!("$->get('a')").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(1)), vec![]), + ); + + assert_eq!( + selection!("$->get('b')").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(2)), vec![]), + ); + + assert_eq!( + selection!("$->get('c')").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(3)), vec![]), + ); + + assert_eq!( + selection!("$->get('d')").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method 
->get(\"d\") object key not found", + "path": ["->get"], + "range": [7, 10], + }))] + ), + ); + + assert_eq!( + selection!("$->get('a')->add(10)").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(11)), vec![]), + ); + + assert_eq!( + selection!("$->get('b')->add(10)").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(12)), vec![]), + ); + + assert_eq!( + selection!("$->keys").apply_to(&json!({ + "a": 1, + "b": 2, + "c": 3, + })), + (Some(json!(["a", "b", "c"])), vec![]), + ); + + assert_eq!( + selection!("$->keys").apply_to(&json!({})), + (Some(json!([])), vec![]), + ); + + assert_eq!( + selection!("notAnObject->keys").apply_to(&json!({ + "notAnObject": 123, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->keys requires an object input, not number", + "path": ["notAnObject", "->keys"], + "range": [13, 17], + }))] + ), + ); + + assert_eq!( + selection!("$->values").apply_to(&json!({ + "a": 1, + "b": "two", + "c": false, + })), + (Some(json!([1, "two", false])), vec![]), + ); + + assert_eq!( + selection!("$->values").apply_to(&json!({})), + (Some(json!([])), vec![]), + ); + + assert_eq!( + selection!("notAnObject->values").apply_to(&json!({ + "notAnObject": null, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->values requires an object input, not null", + "path": ["notAnObject", "->values"], + "range": [13, 19], + }))] + ), + ); + + assert_eq!( + selection!("$->entries").apply_to(&json!({ + "a": 1, + "b": "two", + "c": false, + })), + ( + Some(json!([ + { "key": "a", "value": 1 }, + { "key": "b", "value": "two" }, + { "key": "c", "value": false }, + ])), + vec![], + ), + ); + + assert_eq!( + // This is just like $->keys, given the automatic array mapping of + // .key, though you probably want to use ->keys directly because it + // avoids cloning all the values unnecessarily. 
+ selection!("$->entries.key").apply_to(&json!({ + "one": 1, + "two": 2, + "three": 3, + })), + (Some(json!(["one", "two", "three"])), vec![]), + ); + + assert_eq!( + // This is just like $->values, given the automatic array mapping of + // .value, though you probably want to use ->values directly because + // it avoids cloning all the keys unnecessarily. + selection!("$->entries.value").apply_to(&json!({ + "one": 1, + "two": 2, + "three": 3, + })), + (Some(json!([1, 2, 3])), vec![]), + ); + + assert_eq!( + selection!("$->entries").apply_to(&json!({})), + (Some(json!([])), vec![]), + ); + + assert_eq!( + selection!("notAnObject->entries").apply_to(&json!({ + "notAnObject": true, + })), + ( + None, + vec![ApplyToError::from_json(&json!({ + "message": "Method ->entries requires an object input, not boolean", + "path": ["notAnObject", "->entries"], + "range": [13, 20], + }))] + ), + ); +} + +#[test] +fn test_logical_methods() { + assert_eq!( + selection!("$->map(@->not)").apply_to(&json!([ + true, + false, + 0, + 1, + -123, + null, + "hello", + {}, + [], + ])), + ( + Some(json!([ + false, true, true, false, false, true, false, false, false, + ])), + vec![], + ), + ); + + assert_eq!( + selection!("$->map(@->not->not)").apply_to(&json!([ + true, + false, + 0, + 1, + -123, + null, + "hello", + {}, + [], + ])), + ( + Some(json!([ + true, false, false, true, true, false, true, true, true, + ])), + vec![], + ), + ); + + assert_eq!( + selection!("$.a->and($.b, $.c)").apply_to(&json!({ + "a": true, + "b": null, + "c": true, + })), + (Some(json!(false)), vec![]), + ); + assert_eq!( + selection!("$.b->and($.c, $.a)").apply_to(&json!({ + "a": "hello", + "b": true, + "c": 123, + })), + (Some(json!(true)), vec![]), + ); + assert_eq!( + selection!("$.both->and($.and)").apply_to(&json!({ + "both": true, + "and": true, + })), + (Some(json!(true)), vec![]), + ); + assert_eq!( + selection!("data.x->and($.data.y)").apply_to(&json!({ + "data": { + "x": true, + "y": false, + }, + })), + 
(Some(json!(false)), vec![]), + ); + + assert_eq!( + selection!("$.a->or($.b, $.c)").apply_to(&json!({ + "a": true, + "b": null, + "c": true, + })), + (Some(json!(true)), vec![]), + ); + assert_eq!( + selection!("$.b->or($.a, $.c)").apply_to(&json!({ + "a": false, + "b": null, + "c": 0, + })), + (Some(json!(false)), vec![]), + ); + assert_eq!( + selection!("$.both->or($.and)").apply_to(&json!({ + "both": true, + "and": false, + })), + (Some(json!(true)), vec![]), + ); + assert_eq!( + selection!("data.x->or($.data.y)").apply_to(&json!({ + "data": { + "x": false, + "y": false, + }, + })), + (Some(json!(false)), vec![]), + ); +} + +#[rstest::rstest] +#[case(json!(null), json!("null"), vec![])] +#[case(json!(true), json!("true"), vec![])] +#[case(json!(false), json!("false"), vec![])] +#[case(json!(42), json!("42"), vec![])] +#[case(json!(10.8), json!("10.8"), vec![])] +#[case(json!("hello world"), json!("\"hello world\""), vec![])] +#[case(json!([1, 2, 3]), json!("[1,2,3]"), vec![])] +#[case(json!({ "key": "value" }), json!("{\"key\":\"value\"}"), vec![])] +#[case(json!([1, "two", true, null]), json!("[1,\"two\",true,null]"), vec![])] +fn table_test_json_stringify_method( + #[case] input: JSON, + #[case] expected: JSON, + #[case] errors: Vec, +) { + assert_eq!( + selection!("$->jsonStringify").apply_to(&input), + (Some(expected), errors), + ); +} + +#[test] +fn test_json_stringify_method_error() { + assert_eq!( + selection!("$->jsonStringify(1)").apply_to(&json!(null)), + ( + None, + vec![ApplyToError::new( + "Method ->jsonStringify does not take any arguments".to_string(), + vec![json!("->jsonStringify")], + Some(3..16) + )] + ), + ); +} diff --git a/apollo-federation/src/sources/connect/json_selection/mod.rs b/apollo-federation/src/sources/connect/json_selection/mod.rs index 0abd543130..216e27b714 100644 --- a/apollo-federation/src/sources/connect/json_selection/mod.rs +++ b/apollo-federation/src/sources/connect/json_selection/mod.rs @@ -1,13 +1,19 @@ mod apply_to; 
-mod graphql; mod helpers; +mod immutable; +mod known_var; +mod lit_expr; +mod location; +mod methods; mod parser; mod pretty; +mod selection_set; pub use apply_to::*; -pub use parser::*; // Pretty code is currently only used in tests, so this cfg is to suppress the // unused lint warning. If pretty code is needed in not test code, feel free to // remove the `#[cfg(test)]`. +pub(crate) use location::Ranged; +pub use parser::*; #[cfg(test)] -pub use pretty::*; +pub(crate) use pretty::*; diff --git a/apollo-federation/src/sources/connect/json_selection/parser.rs b/apollo-federation/src/sources/connect/json_selection/parser.rs index f73762f223..cb6f30e6d6 100644 --- a/apollo-federation/src/sources/connect/json_selection/parser.rs +++ b/apollo-federation/src/sources/connect/json_selection/parser.rs @@ -1,5 +1,6 @@ use std::fmt::Display; +use apollo_compiler::collections::IndexSet; use nom::branch::alt; use nom::character::complete::char; use nom::character::complete::one_of; @@ -7,21 +8,79 @@ use nom::combinator::all_consuming; use nom::combinator::map; use nom::combinator::opt; use nom::combinator::recognize; +use nom::error::ParseError; use nom::multi::many0; -use nom::sequence::delimited; use nom::sequence::pair; use nom::sequence::preceded; +use nom::sequence::terminated; use nom::sequence::tuple; use nom::IResult; -use serde::Serialize; +use nom::Slice; use serde_json_bytes::Value as JSON; use super::helpers::spaces_or_comments; +use super::known_var::KnownVariable; +use super::lit_expr::LitExpr; +use super::location::merge_ranges; +use super::location::new_span; +use super::location::ranged_span; +use super::location::OffsetRange; +use super::location::Ranged; +use super::location::Span; +use super::location::WithRange; +use crate::sources::connect::variable::Namespace; +use crate::sources::connect::variable::VariableNamespace; +use crate::sources::connect::variable::VariablePathPart; +use crate::sources::connect::variable::VariableReference; + +// ParseResult is 
the internal type returned by most ::parse methods, as it is +// convenient to use with nom's combinators. The top-level JSONSelection::parse +// method returns a slightly different IResult type that hides implementation +// details of the nom-specific types. +// +// TODO Consider switching the third IResult type parameter to VerboseError +// here, if error messages can be improved with additional context. +pub(super) type ParseResult<'a, T> = IResult, T>; + +// Generates a non-fatal error with the given suffix and message, allowing the +// parser to recover and continue. +pub(super) fn nom_error_message<'a>( + suffix: Span<'a>, + // This message type forbids computing error messages with format!, which + // might be worthwhile in the future. For now, it's convenient to avoid + // String messages so the Span type can remain Copy, so we don't have to + // clone spans frequently in the parsing code. In most cases, the suffix + // provides the dynamic context needed to interpret the static message. + message: &'static str, +) -> nom::Err>> { + nom::Err::Error(nom::error::Error::from_error_kind( + suffix.map_extra(|_| Some(message)), + nom::error::ErrorKind::IsNot, + )) +} + +// Generates a fatal error with the given suffix Span and message, causing the +// parser to abort with the given error message, which is useful after +// recognizing syntax that completely constrains what follows (like the -> token +// before a method name), and what follows does not parse as required. +pub(super) fn nom_fail_message<'a>( + suffix: Span<'a>, + message: &'static str, +) -> nom::Err>> { + nom::Err::Failure(nom::error::Error::from_error_kind( + suffix.map_extra(|_| Some(message)), + nom::error::ErrorKind::IsNot, + )) +} + +pub(crate) trait ExternalVarPaths { + fn external_var_paths(&self) -> Vec<&PathSelection>; +} -// JSONSelection ::= NakedSubSelection | PathSelection +// JSONSelection ::= PathSelection | NakedSubSelection // NakedSubSelection ::= NamedSelection* StarSelection? 
-#[derive(Debug, PartialEq, Clone, Serialize)] +#[derive(Debug, PartialEq, Clone)] pub enum JSONSelection { // Although we reuse the SubSelection type for the JSONSelection::Named // case, we parse it as a sequence of NamedSelection items without the @@ -30,30 +89,131 @@ pub enum JSONSelection { Path(PathSelection), } +// To keep JSONSelection::parse consumers from depending on details of the nom +// error types, JSONSelection::parse reports this custom error type. Other +// ::parse methods still internally report nom::error::Error for the most part. +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct JSONSelectionParseError { + // The message will be a meaningful error message in many cases, but may + // fall back to a formatted nom::error::ErrorKind in some cases, e.g. when + // an alt(...) runs out of options and we can't determine which underlying + // error was "most" responsible. + pub message: String, + + // Since we are not exposing the nom_locate-specific Span type, we report + // span.fragment() and span.location_offset() here. + pub fragment: String, + + // While it might be nice to report a range rather than just an offset, not + // all parsing errors have an unambiguous end offset, so the best we can do + // is point to the suffix of the input that failed to parse (which + // corresponds to where the fragment starts). + pub offset: usize, +} + +impl std::error::Error for JSONSelectionParseError {} + +impl Display for JSONSelectionParseError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}: {}", self.message, self.fragment) + } +} + impl JSONSelection { pub fn empty() -> Self { JSONSelection::Named(SubSelection { selections: vec![], - star: None, + ..Default::default() }) } - pub fn parse(input: &str) -> IResult<&str, Self> { - alt(( - all_consuming(map( - tuple(( - many0(NamedSelection::parse), - // When a * selection is used, it must be the last selection - // in the sequence, since it is not a NamedSelection. 
- opt(StarSelection::parse), - // In case there were no named selections and no * selection, we - // still want to consume any space before the end of the input. - spaces_or_comments, - )), - |(selections, star, _)| Self::Named(SubSelection { selections, star }), + pub fn is_empty(&self) -> bool { + match self { + JSONSelection::Named(subselect) => subselect.selections.is_empty(), + JSONSelection::Path(path) => *path.path == PathList::Empty, + } + } + + // JSONSelection::parse is possibly the "most public" method in the entire + // file, so it's important that the method signature can remain stable even + // if we drastically change implementation details. That's why we use &str + // as the input type and a custom JSONSelectionParseError type as the error + // type, rather than using Span or nom::error::Error directly. + pub fn parse(input: &str) -> Result { + match JSONSelection::parse_span(new_span(input)) { + Ok((remainder, selection)) => { + let fragment = remainder.fragment(); + if fragment.is_empty() { + Ok(selection) + } else { + Err(JSONSelectionParseError { + message: "Unexpected trailing characters".to_string(), + fragment: fragment.to_string(), + offset: remainder.location_offset(), + }) + } + } + + Err(e) => match e { + nom::Err::Error(e) | nom::Err::Failure(e) => { + Err(JSONSelectionParseError { + message: if let Some(message_str) = e.input.extra { + message_str.to_string() + } else { + // These errors aren't the most user-friendly, but + // with any luck we can gradually replace them with + // custom error messages over time. 
+ format!("nom::error::ErrorKind::{:?}", e.code) + }, + fragment: e.input.fragment().to_string(), + offset: e.input.location_offset(), + }) + } + + nom::Err::Incomplete(_) => unreachable!("nom::Err::Incomplete not expected here"), + }, + } + } + + fn parse_span(input: Span) -> ParseResult { + match alt(( + all_consuming(terminated( + map(PathSelection::parse, Self::Path), + // By convention, most ::parse methods do not consume trailing + // spaces_or_comments, so we need to consume them here in order + // to satisfy the all_consuming requirement. + spaces_or_comments, + )), + all_consuming(terminated( + map(SubSelection::parse_naked, Self::Named), + // It's tempting to hoist the all_consuming(terminated(...)) + // checks outside the alt((...)) so we only need to handle + // trailing spaces_or_comments once, but that won't work because + // the Self::Path case should fail when a single PathSelection + // cannot be parsed, and that failure typically happens because + // the PathSelection::parse method does not consume the entire + // input, which is caught by the first all_consuming above. + spaces_or_comments, )), - all_consuming(map(PathSelection::parse, Self::Path)), ))(input) + { + Ok((remainder, selection)) => { + if remainder.fragment().is_empty() { + Ok((remainder, selection)) + } else { + Err(nom_fail_message( + // Usually our nom errors report the original input that + // failed to parse, but that's not helpful here, since + // input corresponds to the entire string, whereas this + // error message is reporting junk at the end of the + // string that should not be there. 
+ remainder, + "Unexpected trailing characters", + )) + } + } + Err(e) => Err(e), + } } pub(crate) fn next_subselection(&self) -> Option<&SubSelection> { @@ -63,30 +223,86 @@ impl JSONSelection { } } + #[allow(unused)] pub(crate) fn next_mut_subselection(&mut self) -> Option<&mut SubSelection> { match self { JSONSelection::Named(subselect) => Some(subselect), JSONSelection::Path(path) => path.next_mut_subselection(), } } + + pub fn external_variables(&self) -> impl Iterator + '_ { + self.external_var_paths() + .into_iter() + .flat_map(|var_path| var_path.variable_reference()) + .map(|var_ref| var_ref.namespace.namespace) + } +} + +impl ExternalVarPaths for JSONSelection { + fn external_var_paths(&self) -> Vec<&PathSelection> { + match self { + JSONSelection::Named(subselect) => subselect.external_var_paths(), + JSONSelection::Path(path) => path.external_var_paths(), + } + } } -// NamedSelection ::= NamedPathSelection | NamedFieldSelection | NamedQuotedSelection | NamedGroupSelection +// NamedSelection ::= NamedPathSelection | PathWithSubSelection | NamedFieldSelection | NamedGroupSelection // NamedPathSelection ::= Alias PathSelection -// NamedFieldSelection ::= Alias? Identifier SubSelection? -// NamedQuotedSelection ::= Alias StringLiteral SubSelection? +// NamedFieldSelection ::= Alias? Key SubSelection? // NamedGroupSelection ::= Alias SubSelection +// PathSelection ::= Path SubSelection? +// PathWithSubSelection ::= Path SubSelection -#[derive(Debug, PartialEq, Clone, Serialize)] +#[derive(Debug, PartialEq, Eq, Clone)] pub enum NamedSelection { - Field(Option, String, Option), - Quoted(Alias, String, Option), - Path(Alias, PathSelection), + Field(Option, WithRange, Option), + // Represents either NamedPathSelection or PathWithSubSelection, with the + // invariant alias.is_some() || path.has_subselection() enforced by + // NamedSelection::parse_path. 
+ Path { + alias: Option, + // True for PathWithSubSelection, and potentially in the future for + // object/null-returning NamedSelection::Path items that do not have an + // explicit trailing SubSelection. + inline: bool, + path: PathSelection, + }, Group(Alias, SubSelection), } +// Like PathSelection, NamedSelection is an AST structure that takes its range +// entirely from its children, so NamedSelection itself does not need to provide +// separate storage for its own range, and therefore does not need to be wrapped +// as WithRange, but merely needs to implement the Ranged trait. +impl Ranged for NamedSelection { + fn range(&self) -> OffsetRange { + match self { + Self::Field(alias, key, sub) => { + let range = key.range(); + let range = if let Some(alias) = alias.as_ref() { + merge_ranges(alias.range(), range) + } else { + range + }; + if let Some(sub) = sub.as_ref() { + merge_ranges(range, sub.range()) + } else { + range + } + } + Self::Path { alias, path, .. } => { + let alias_range = alias.as_ref().and_then(|alias| alias.range()); + merge_ranges(alias_range, path.range()) + } + Self::Group(alias, sub) => merge_ranges(alias.range(), sub.range()), + } + } +} + impl NamedSelection { - pub(crate) fn parse(input: &str) -> IResult<&str, Self> { + pub(crate) fn parse(input: Span) -> ParseResult { alt(( // We must try parsing NamedPathSelection before NamedFieldSelection // and NamedQuotedSelection because a NamedPathSelection without a @@ -98,65 +314,100 @@ impl NamedSelection { // nom, so instead we greedily parse NamedPathSelection first. 
Self::parse_path, Self::parse_field, - Self::parse_quoted, Self::parse_group, ))(input) } - fn parse_field(input: &str) -> IResult<&str, Self> { + fn parse_field(input: Span) -> ParseResult { tuple(( opt(Alias::parse), - delimited(spaces_or_comments, parse_identifier, spaces_or_comments), - opt(SubSelection::parse), - ))(input) - .map(|(input, (alias, name, selection))| (input, Self::Field(alias, name, selection))) - } - - fn parse_quoted(input: &str) -> IResult<&str, Self> { - tuple(( - Alias::parse, - delimited(spaces_or_comments, parse_string_literal, spaces_or_comments), + Key::parse, + spaces_or_comments, opt(SubSelection::parse), ))(input) - .map(|(input, (alias, name, selection))| (input, Self::Quoted(alias, name, selection))) + .map(|(remainder, (alias, name, _, selection))| { + (remainder, Self::Field(alias, name, selection)) + }) } - fn parse_path(input: &str) -> IResult<&str, Self> { - tuple((Alias::parse, PathSelection::parse))(input) - .map(|(input, (alias, path))| (input, Self::Path(alias, path))) + // Parses either NamedPathSelection or PathWithSubSelection. + fn parse_path(input: Span) -> ParseResult { + if let Ok((remainder, alias)) = Alias::parse(input) { + match PathSelection::parse(remainder) { + Ok((remainder, path)) => Ok(( + remainder, + Self::Path { + alias: Some(alias), + inline: false, + path, + }, + )), + Err(nom::Err::Failure(e)) => Err(nom::Err::Failure(e)), + Err(_) => Err(nom_error_message( + input, + "Path selection alias must be followed by a path", + )), + } + } else { + match PathSelection::parse(input) { + Ok((remainder, path)) => { + if path.has_subselection() { + Ok(( + remainder, + Self::Path { + alias: None, + // Inline without ... 
+ inline: true, + path, + }, + )) + } else { + Err(nom_fail_message( + input, + "Named path selection must either begin with alias or ..., or end with subselection", + )) + } + } + Err(nom::Err::Failure(e)) => Err(nom::Err::Failure(e)), + Err(_) => Err(nom_error_message( + input, + "Path selection must either begin with alias or ..., or end with subselection", + )), + } + } } - fn parse_group(input: &str) -> IResult<&str, Self> { + fn parse_group(input: Span) -> ParseResult { tuple((Alias::parse, SubSelection::parse))(input) .map(|(input, (alias, group))| (input, Self::Group(alias, group))) } - #[allow(dead_code)] - pub(crate) fn name(&self) -> &str { + pub(crate) fn names(&self) -> Vec<&str> { match self { Self::Field(alias, name, _) => { if let Some(alias) = alias { - alias.name.as_str() + vec![alias.name.as_str()] } else { - name.as_str() + vec![name.as_str()] } } - Self::Quoted(alias, _, _) => alias.name.as_str(), - Self::Path(alias, _) => alias.name.as_str(), - Self::Group(alias, _) => alias.name.as_str(), - } - } - - /// Extracts the property path for a given named selection - /// - // TODO: Expand on what this means once I have a better understanding - pub(crate) fn property_path(&self) -> Vec { - match self { - NamedSelection::Field(_, name, _) => vec![Key::Field(name.to_string())], - NamedSelection::Quoted(_, _, Some(_)) => todo!(), - NamedSelection::Quoted(_, name, None) => vec![Key::Quoted(name.to_string())], - NamedSelection::Path(_, path) => path.collect_paths(), - NamedSelection::Group(alias, _) => vec![Key::Field(alias.name.to_string())], + Self::Path { alias, path, .. } => { + #[allow(clippy::if_same_then_else)] + if let Some(alias) = alias { + vec![alias.name.as_str()] + } else if let Some(sub) = path.next_subselection() { + // Flatten and deduplicate the names of the NamedSelection + // items in the SubSelection. 
+ let mut name_set = IndexSet::default(); + for selection in sub.selections_iter() { + name_set.extend(selection.names()); + } + name_set.into_iter().collect() + } else { + vec![] + } + } + Self::Group(alias, _) => vec![alias.name.as_str()], } } @@ -164,27 +415,24 @@ impl NamedSelection { pub(crate) fn next_subselection(&self) -> Option<&SubSelection> { match self { // Paths are complicated because they can have a subselection deeply nested - NamedSelection::Path(_, path) => path.next_subselection(), + Self::Path { path, .. } => path.next_subselection(), // The other options have it at the root - NamedSelection::Field(_, _, Some(sub)) - | NamedSelection::Quoted(_, _, Some(sub)) - | NamedSelection::Group(_, sub) => Some(sub), + Self::Field(_, _, Some(sub)) | Self::Group(_, sub) => Some(sub), // Every other option does not have a subselection _ => None, } } + #[allow(unused)] pub(crate) fn next_mut_subselection(&mut self) -> Option<&mut SubSelection> { match self { // Paths are complicated because they can have a subselection deeply nested - NamedSelection::Path(_, path) => path.next_mut_subselection(), + Self::Path { path, .. } => path.next_mut_subselection(), // The other options have it at the root - NamedSelection::Field(_, _, Some(sub)) - | NamedSelection::Quoted(_, _, Some(sub)) - | NamedSelection::Group(_, sub) => Some(sub), + Self::Field(_, _, Some(sub)) | Self::Group(_, sub) => Some(sub), // Every other option does not have a subselection _ => None, @@ -192,178 +440,563 @@ impl NamedSelection { } } -// PathSelection ::= (VarPath | KeyPath) SubSelection? -// VarPath ::= "$" (NO_SPACE Identifier)? PathStep* -// KeyPath ::= Key PathStep+ -// PathStep ::= "." Key | "->" Identifier MethodArgs? - -#[derive(Debug, PartialEq, Clone, Serialize)] -pub enum PathSelection { - // We use a recursive structure here instead of a Vec to make applying - // the selection to a JSON value easier. 
- Var(String, Box), - Key(Key, Box), +impl ExternalVarPaths for NamedSelection { + fn external_var_paths(&self) -> Vec<&PathSelection> { + match self { + Self::Field(_, _, Some(sub)) | Self::Group(_, sub) => sub.external_var_paths(), + Self::Path { path, .. } => path.external_var_paths(), + _ => vec![], + } + } +} + +// Path ::= VarPath | KeyPath | AtPath | ExprPath +// PathSelection ::= Path SubSelection? +// PathWithSubSelection ::= Path SubSelection +// VarPath ::= "$" (NO_SPACE Identifier)? PathStep* +// KeyPath ::= Key PathStep+ +// AtPath ::= "@" PathStep* +// ExprPath ::= "$(" LitExpr ")" PathStep* +// PathStep ::= "." Key | "->" Identifier MethodArgs? + +#[derive(Debug, PartialEq, Eq, Clone)] +pub struct PathSelection { + pub(super) path: WithRange, +} + +// Like NamedSelection, PathSelection is an AST structure that takes its range +// entirely from self.path (a WithRange), so PathSelection itself does +// not need to be wrapped as WithRange, but merely needs to +// implement the Ranged trait. 
+impl Ranged for PathSelection { + fn range(&self) -> OffsetRange { + self.path.range() + } +} + +impl PathSelection { + pub fn parse(input: Span) -> ParseResult { + PathList::parse(input).map(|(input, path)| (input, Self { path })) + } + + pub(crate) fn variable_reference(&self) -> Option> { + match self.path.as_ref() { + PathList::Var(var, tail) => match var.as_ref() { + KnownVariable::External(namespace) => { + let parts = tail.as_ref().variable_path_parts(); + let location = parts + .last() + .map(|part| part.location.clone()) + .or(var.range()) + .map(|location| location.end) + .and_then(|end| var.range().map(|location| location.start..end)) + .unwrap_or_default(); + Some(VariableReference { + namespace: VariableNamespace { + namespace: *namespace, + location: var.range().unwrap_or_default(), + }, + path: parts, + location, + }) + } + _ => None, + }, + _ => None, + } + } + + #[allow(unused)] + pub(super) fn is_single_key(&self) -> bool { + self.path.is_single_key() + } + + #[allow(unused)] + pub(super) fn from_slice(keys: &[Key], selection: Option) -> Self { + Self { + path: WithRange::new(PathList::from_slice(keys, selection), None), + } + } + + #[allow(unused)] + pub(super) fn has_subselection(&self) -> bool { + self.path.has_subselection() + } + + pub(super) fn next_subselection(&self) -> Option<&SubSelection> { + self.path.next_subselection() + } + + #[allow(unused)] + pub(super) fn next_mut_subselection(&mut self) -> Option<&mut SubSelection> { + self.path.next_mut_subselection() + } +} + +impl ExternalVarPaths for PathSelection { + fn external_var_paths(&self) -> Vec<&PathSelection> { + let mut paths = vec![]; + match self.path.as_ref() { + PathList::Var(var_name, tail) => { + if matches!(var_name.as_ref(), KnownVariable::External(_)) { + paths.push(self); + } + paths.extend(tail.external_var_paths()); + } + other => { + paths.extend(other.external_var_paths()); + } + }; + paths + } +} + +impl From for PathSelection { + fn from(path: PathList) -> Self { 
+ Self { + path: WithRange::new(path, None), + } + } +} + +#[derive(Debug, PartialEq, Eq, Clone)] +pub(super) enum PathList { + // A VarPath must start with a variable (either $identifier, $, or @), + // followed by any number of PathStep items (the WithRange). + // Because we represent the @ quasi-variable using PathList::Var, this + // variant handles both VarPath and AtPath from the grammar. The + // PathList::Var variant may only appear at the beginning of a + // PathSelection's PathList, not in the middle. + Var(WithRange, WithRange), + + // A PathSelection that starts with a PathList::Key is a KeyPath, but a + // PathList::Key also counts as PathStep item, so it may also appear in the + // middle/tail of a PathList. + Key(WithRange, WithRange), + + // An ExprPath, which begins with a LitExpr enclosed by $(...). Must appear + // only at the beginning of a PathSelection, like PathList::Var. + Expr(WithRange, WithRange), + + // A PathList::Method is a PathStep item that may appear only in the + // middle/tail (not the beginning) of a PathSelection. + Method(WithRange, Option, WithRange), + + // Optionally, a PathList may end with a SubSelection, which applies a set + // of named selections to the final value of the path. PathList::Selection + // by itself is not a valid PathList. Selection(SubSelection), + + // Every PathList must be terminated by either PathList::Selection or + // PathList::Empty. PathList::Empty by itself is not a valid PathList. 
Empty, } -impl PathSelection { - pub(crate) fn parse(input: &str) -> IResult<&str, Self> { +impl PathList { + pub(super) fn parse(input: Span) -> ParseResult> { match Self::parse_with_depth(input, 0) { - Ok((remainder, Self::Empty)) => Err(nom::Err::Error(nom::error::Error::new( - remainder, - nom::error::ErrorKind::IsNot, - ))), + Ok((_, parsed)) if matches!(*parsed, Self::Empty) => Err(nom_error_message( + input, + // As a small technical note, you could consider + // NamedGroupSelection (an Alias followed by a SubSelection) as + // a kind of NamedPathSelection where the path is empty, but + // it's still useful to distinguish groups in the grammar so we + // can forbid empty paths in general. In fact, when parsing a + // NamedGroupSelection, this error message is likely to be the + // reason we abandon parsing NamedPathSelection and correctly + // fall back to NamedGroupSelection. + "Path selection cannot be empty", + )), otherwise => otherwise, } } - fn parse_with_depth(input: &str, depth: usize) -> IResult<&str, Self> { + #[cfg(test)] + pub(super) fn into_with_range(self) -> WithRange { + WithRange::new(self, None) + } + + fn parse_with_depth(input: Span, depth: usize) -> ParseResult> { + // If the input is empty (i.e. this method will end up returning + // PathList::Empty), we want the OffsetRange to be an empty range at the + // end of the previously parsed PathList elements, not separated from + // them by trailing spaces or comments, so we need to capture the empty + // range before consuming leading spaces_or_comments. + let offset_if_empty = input.location_offset(); + let range_if_empty: OffsetRange = Some(offset_if_empty..offset_if_empty); + + // Consume leading spaces_or_comments for all cases below. let (input, _spaces) = spaces_or_comments(input)?; - // Variable references and key references without a leading . are - // accepted only at depth 0, or at the beginning of the PathSelection. + // Variable references (including @ references), $(...) 
literals, and + // key references without a leading . are accepted only at depth 0, or + // at the beginning of the PathSelection. if depth == 0 { - if let Ok((suffix, opt_var)) = delimited( - tuple((spaces_or_comments, char('$'))), - opt(parse_identifier), + // The $(...) syntax allows embedding LitExpr values within + // JSONSelection syntax (when not already parsing a LitExpr). This + // case needs to come before the $ (and $var) case, because $( looks + // like the $ variable followed by a parse error in the variable + // case, unless we add some complicated lookahead logic there. + if let Ok((suffix, (_, dollar_open_paren, expr, close_paren, _))) = tuple(( + spaces_or_comments, + ranged_span("$("), + LitExpr::parse, spaces_or_comments, - )(input) + ranged_span(")"), + ))(input) { - let (input, rest) = Self::parse_with_depth(suffix, depth + 1)?; - // Note the $ prefix is included in the variable name. - let dollar_var = format!("${}", opt_var.unwrap_or("".to_string())); - return Ok((input, Self::Var(dollar_var, Box::new(rest)))); + let (remainder, rest) = Self::parse_with_depth(suffix, depth + 1)?; + let expr_range = merge_ranges(dollar_open_paren.range(), close_paren.range()); + let full_range = merge_ranges(expr_range, rest.range()); + return Ok(( + remainder, + WithRange::new(Self::Expr(expr, rest), full_range), + )); + } + + if let Ok((suffix, (dollar, opt_var))) = + tuple((ranged_span("$"), opt(parse_identifier_no_space)))(input) + { + let dollar_range = dollar.range(); + let (remainder, rest) = Self::parse_with_depth(suffix, depth + 1)?; + let full_range = merge_ranges(dollar_range.clone(), rest.range()); + return if let Some(var) = opt_var { + let full_name = format!("{}{}", dollar.as_ref(), var.as_str()); + if let Some(known_var) = KnownVariable::from_str(full_name.as_str()) { + let var_range = merge_ranges(dollar_range.clone(), var.range()); + let ranged_known_var = WithRange::new(known_var, var_range); + Ok(( + remainder, + 
WithRange::new(Self::Var(ranged_known_var, rest), full_range), + )) + } else { + Err(nom_fail_message( + input, + // Here's an error where we might like to use + // format! to include the full_name of the unknown + // variable in the error message, but that means + // we'd have to store the message as an owned + // String, which would make Span no longer Copy, + // which leads to more cloning of Spans in the + // parser code. For now, the input Span reported + // with the error will begin with the unknown + // variable name, which should be enough to + // interpret this static message. + "Unknown variable", + )) + } + } else { + let ranged_dollar_var = + WithRange::new(KnownVariable::Dollar, dollar_range.clone()); + Ok(( + remainder, + WithRange::new(Self::Var(ranged_dollar_var, rest), full_range), + )) + }; + } + + if let Ok((suffix, at)) = ranged_span("@")(input) { + let (remainder, rest) = Self::parse_with_depth(suffix, depth + 1)?; + let full_range = merge_ranges(at.range(), rest.range()); + return Ok(( + remainder, + WithRange::new( + Self::Var(WithRange::new(KnownVariable::AtSign, at.range()), rest), + full_range, + ), + )); } if let Ok((suffix, key)) = Key::parse(input) { - let (input, rest) = Self::parse_with_depth(suffix, depth + 1)?; - return match rest { - Self::Empty | Self::Selection(_) => Err(nom::Err::Error( - nom::error::Error::new(input, nom::error::ErrorKind::IsNot), + let (remainder, rest) = Self::parse_with_depth(suffix, depth + 1)?; + return match rest.as_ref() { + // We use nom_error_message rather than nom_fail_message + // here because the key might actually be a field selection, + // which means we want to unwind parsing the path and fall + // back to parsing other kinds of NamedSelection. + Self::Empty | Self::Selection(_) => Err(nom_error_message( + input, + // Another place where format! might be useful to + // suggest .{key}, which would require storing error + // messages as owned Strings. + "Single-key path must be prefixed with $. 
to avoid ambiguity with field name", )), - rest => Ok((input, Self::Key(key, Box::new(rest)))), + _ => { + let full_range = merge_ranges(key.range(), rest.range()); + Ok((remainder, WithRange::new(Self::Key(key, rest), full_range))) + } }; } } - // The .key case is applicable at any depth. If it comes first in the - // path selection, $.key is implied, but the distinction is preserved - // (using Self::Path rather than Self::Var) for accurate reprintability. - if let Ok((suffix, key)) = preceded( - tuple((spaces_or_comments, char('.'), spaces_or_comments)), - Key::parse, - )(input) - { - // tuple((char('.'), Key::parse))(input) { - let (input, rest) = Self::parse_with_depth(suffix, depth + 1)?; - return Ok((input, Self::Key(key, Box::new(rest)))); + if depth == 0 { + // If the PathSelection does not start with a $var (or $ or @), a + // key., or $(expr), it is not a valid PathSelection. + if tuple((ranged_span("."), Key::parse))(input).is_ok() { + // Since we previously allowed starting key paths with .key but + // now forbid that syntax (because it can be ambiguous), suggest + // the unambiguous $.key syntax instead. + return Err(nom_fail_message( + input, + "Key paths cannot start with just .key (use $.key instead)", + )); + } + // This error technically covers the case above, but doesn't suggest + // a helpful solution. + return Err(nom_error_message( + input, + "Path selection must start with key., $variable, $, @, or $(expression)", + )); } - if depth == 0 { - // If the PathSelection does not start with a $var, a key., or a - // .key, it is not a valid PathSelection. - return Err(nom::Err::Error(nom::error::Error::new( + // In previous versions of this code, a .key could appear at depth 0 (at + // the beginning of a path), which was useful to disambiguate a KeyPath + // consisting of a single key from a field selection. 
+ // + // Now that key paths can appear alongside/after named selections within + // a SubSelection, the .key syntax is potentially unsafe because it may + // be parsed as a continuation of a previous field selection, since we + // ignore spaces/newlines/comments between keys in a path. + // + // In order to prevent this ambiguity, we now require that a single .key + // be written as a subproperty of the $ variable, e.g. $.key, which is + // equivalent to the old behavior, but parses unambiguously. In terms of + // this code, that means we allow a .key only at depths > 0. + if let Ok((remainder, (dot, key))) = tuple((ranged_span("."), Key::parse))(input) { + let (remainder, rest) = Self::parse_with_depth(remainder, depth + 1)?; + let dot_key_range = merge_ranges(dot.range(), key.range()); + let full_range = merge_ranges(dot_key_range, rest.range()); + return Ok((remainder, WithRange::new(Self::Key(key, rest), full_range))); + } + + // If we failed to parse "." Key above, but the input starts with a '.' + // character, it's an error unless it's the beginning of a ... token. + if input.fragment().starts_with('.') && !input.fragment().starts_with("...") { + return Err(nom_fail_message( input, - nom::error::ErrorKind::IsNot, - ))); + "Path selection . must be followed by key (identifier or quoted string literal)", + )); + } + + // PathSelection can never start with a naked ->method (instead, use + // $->method or @->method if you want to operate on the current value). + if let Ok((suffix, arrow)) = ranged_span("->")(input) { + // As soon as we see a -> token, we know what follows must be a + // method name, so we can unconditionally return based on what + // parse_identifier tells us. since MethodArgs::parse is optional, + // the absence of args will never trigger the error case. 
+ return match tuple((parse_identifier, opt(MethodArgs::parse)))(suffix) { + Ok((suffix, (method, args))) => { + let (remainder, rest) = Self::parse_with_depth(suffix, depth + 1)?; + let full_range = merge_ranges(arrow.range(), rest.range()); + Ok(( + remainder, + WithRange::new(Self::Method(method, args, rest), full_range), + )) + } + Err(_) => Err(nom_fail_message(input, "Method name must follow ->")), + }; } - // If the PathSelection has a SubSelection, it must appear at the end of - // a non-empty path. + // Likewise, if the PathSelection has a SubSelection, it must appear at + // the end of a non-empty path. PathList::parse_with_depth is not + // responsible for enforcing a trailing SubSelection in the + // PathWithSubSelection case, since that requirement is checked by + // NamedSelection::parse_path. if let Ok((suffix, selection)) = SubSelection::parse(input) { - return Ok((suffix, Self::Selection(selection))); + let selection_range = selection.range(); + return Ok(( + suffix, + WithRange::new(Self::Selection(selection), selection_range), + )); } // The Self::Empty enum case is used to indicate the end of a // PathSelection that has no SubSelection. - Ok((input, Self::Empty)) + Ok((input, WithRange::new(Self::Empty, range_if_empty))) } - pub(crate) fn from_slice(properties: &[Key], selection: Option) -> Self { - match properties { - [] => selection.map_or(Self::Empty, Self::Selection), - [head, tail @ ..] => { - Self::Key(head.clone(), Box::new(Self::from_slice(tail, selection))) - } + pub(super) fn is_single_key(&self) -> bool { + match self { + Self::Key(_, rest) => matches!(rest.as_ref(), Self::Selection(_) | Self::Empty), + _ => false, } } - /// Collect all nested paths - /// - /// This method attempts to collect as many paths as possible, shorting out once - /// a non path selection is encountered. 
- pub(crate) fn collect_paths(&self) -> Vec { - let mut results = Vec::new(); - - // Collect as many as possible - let mut current = self; - while let Self::Key(key, rest) = current { - results.push(key.clone()); + fn variable_path_parts(&self) -> Vec { + match self { + Self::Key(key, rest) => { + let mut parts = vec![VariablePathPart { + part: key.as_str(), + location: key.range().unwrap_or_default(), + }]; + parts.extend(rest.variable_path_parts()); + parts + } + _ => vec![], + } + } - current = rest; + #[allow(unused)] + pub(super) fn from_slice(properties: &[Key], selection: Option) -> Self { + match properties { + [] => selection.map_or(Self::Empty, Self::Selection), + [head, tail @ ..] => Self::Key( + WithRange::new(head.clone(), None), + WithRange::new(Self::from_slice(tail, selection), None), + ), } + } - results + pub(super) fn has_subselection(&self) -> bool { + self.next_subselection().is_some() } /// Find the next subselection, traversing nested chains if needed - pub(crate) fn next_subselection(&self) -> Option<&SubSelection> { + pub(super) fn next_subselection(&self) -> Option<&SubSelection> { match self { - PathSelection::Var(_, path) => path.next_subselection(), - PathSelection::Key(_, path) => path.next_subselection(), - PathSelection::Selection(sub) => Some(sub), - PathSelection::Empty => None, + Self::Var(_, tail) => tail.next_subselection(), + Self::Key(_, tail) => tail.next_subselection(), + Self::Expr(_, tail) => tail.next_subselection(), + Self::Method(_, _, tail) => tail.next_subselection(), + Self::Selection(sub) => Some(sub), + Self::Empty => None, } } + #[allow(unused)] /// Find the next subselection, traversing nested chains if needed. 
Returns a mutable reference - pub(crate) fn next_mut_subselection(&mut self) -> Option<&mut SubSelection> { + pub(super) fn next_mut_subselection(&mut self) -> Option<&mut SubSelection> { + match self { + Self::Var(_, tail) => tail.next_mut_subselection(), + Self::Key(_, tail) => tail.next_mut_subselection(), + Self::Expr(_, tail) => tail.next_mut_subselection(), + Self::Method(_, _, tail) => tail.next_mut_subselection(), + Self::Selection(sub) => Some(sub), + Self::Empty => None, + } + } +} + +impl ExternalVarPaths for PathList { + fn external_var_paths(&self) -> Vec<&PathSelection> { + let mut paths = vec![]; match self { - PathSelection::Var(_, path) => path.next_mut_subselection(), - PathSelection::Key(_, path) => path.next_mut_subselection(), - PathSelection::Selection(sub) => Some(sub), - PathSelection::Empty => None, + // PathSelection::external_var_paths is responsible for adding all + // variable &PathSelection items to the set, since this + // PathList::Var case cannot be sure it's looking at the beginning + // of the path. However, we call rest.external_var_paths() + // recursively because the tail of the list could contain other full + // PathSelection variable references. 
+ PathList::Var(_, rest) | PathList::Key(_, rest) => { + paths.extend(rest.external_var_paths()); + } + PathList::Expr(expr, rest) => { + paths.extend(expr.external_var_paths()); + paths.extend(rest.external_var_paths()); + } + PathList::Method(_, opt_args, rest) => { + if let Some(args) = opt_args { + for lit_arg in &args.args { + paths.extend(lit_arg.external_var_paths()); + } + } + paths.extend(rest.external_var_paths()); + } + PathList::Selection(sub) => paths.extend(sub.external_var_paths()), + PathList::Empty => {} } + paths } } // SubSelection ::= "{" NakedSubSelection "}" -#[derive(Debug, PartialEq, Clone, Serialize, Default)] +#[derive(Debug, PartialEq, Eq, Clone, Default)] pub struct SubSelection { pub(super) selections: Vec, - pub(super) star: Option, + pub(super) range: OffsetRange, +} + +impl Ranged for SubSelection { + // Since SubSelection is a struct, we can store its range directly as a + // field of the struct, allowing SubSelection to implement the Ranged trait + // without a WithRange wrapper. + fn range(&self) -> OffsetRange { + self.range.clone() + } } impl SubSelection { - pub(crate) fn parse(input: &str) -> IResult<&str, Self> { - tuple(( - spaces_or_comments, - char('{'), - many0(NamedSelection::parse), - // Note that when a * selection is used, it must be the last - // selection in the SubSelection, since it does not count as a - // NamedSelection, and is stored as a separate field from the - // selections vector. 
- opt(StarSelection::parse), + pub(crate) fn parse(input: Span) -> ParseResult { + match tuple(( spaces_or_comments, - char('}'), + ranged_span("{"), + Self::parse_naked, spaces_or_comments, + ranged_span("}"), ))(input) - .map(|(input, (_, _, selections, star, _, _, _))| (input, Self { selections, star })) + { + Ok((remainder, (_, open_brace, sub, _, close_brace))) => { + let range = merge_ranges(open_brace.range(), close_brace.range()); + Ok(( + remainder, + Self { + selections: sub.selections, + range, + }, + )) + } + Err(e) => Err(e), + } } - pub fn selections_iter(&self) -> impl Iterator { - self.selections.iter() - } + fn parse_naked(input: Span) -> ParseResult { + many0(NamedSelection::parse)(input).map(|(remainder, selections)| { + let range = merge_ranges( + selections.first().and_then(|first| first.range()), + selections.last().and_then(|last| last.range()), + ); - pub fn has_star(&self) -> bool { - self.star.is_some() + (remainder, Self { selections, range }) + }) } - pub fn set_star(&mut self, star: Option) { - self.star = star; + // Returns an Iterator over each &NamedSelection that contributes a single + // name to the output object. This is more complicated than returning + // self.selections.iter() because some NamedSelection::Path elements can + // contribute multiple names if they do no have an Alias. + pub fn selections_iter(&self) -> impl Iterator { + // TODO Implement a NamedSelectionIterator to traverse nested selections + // lazily, rather than using an intermediary vector. + let mut selections = vec![]; + for selection in &self.selections { + match selection { + NamedSelection::Path { alias, path, .. } => { + if alias.is_some() { + // If the PathSelection has an Alias, then it has a + // singular name and should be visited directly. 
+ selections.push(selection); + } else if let Some(sub) = path.next_subselection() { + // If the PathSelection does not have an Alias but does + // have a SubSelection, then it represents the + // PathWithSubSelection non-terminal from the grammar + // (see README.md + PR #6076), which produces multiple + // names derived from the SubSelection, which need to be + // recursively collected. + selections.extend(sub.selections_iter()); + } else { + // This no-Alias, no-SubSelection case should be + // forbidden by NamedSelection::parse_path. + debug_assert!(false, "PathSelection without Alias or SubSelection"); + } + } + _ => { + selections.push(selection); + } + }; + } + selections.into_iter() } pub fn append_selection(&mut self, selection: NamedSelection) { @@ -373,91 +1006,54 @@ impl SubSelection { pub fn last_selection_mut(&mut self) -> Option<&mut NamedSelection> { self.selections.last_mut() } - - // Since we enforce that new selections may only be appended to - // self.selections, we can provide an index-based search method that returns - // an unforgeable NamedSelectionIndex, which can later be used to access the - // selection using either get_at_index or get_at_index_mut. - // TODO In the future, this method could make use of an internal lookup - // table to avoid linear search. - pub fn index_of_named_selection(&self, name: &str) -> Option { - self.selections - .iter() - .position(|selection| selection.name() == name) - .map(|pos| NamedSelectionIndex { pos }) - } - - pub fn get_at_index(&self, index: &NamedSelectionIndex) -> &NamedSelection { - self.selections - .get(index.pos) - .expect("NamedSelectionIndex out of bounds") - } - - pub fn get_at_index_mut(&mut self, index: &NamedSelectionIndex) -> &mut NamedSelection { - self.selections - .get_mut(index.pos) - .expect("NamedSelectionIndex out of bounds") - } -} - -pub struct NamedSelectionIndex { - // Intentionally private so NamedSelectionIndex cannot be forged. - pos: usize, } -// StarSelection ::= Alias? 
"*" SubSelection? - -#[derive(Debug, PartialEq, Clone, Serialize)] -pub struct StarSelection( - pub(super) Option, - pub(super) Option>, -); - -impl StarSelection { - pub(crate) fn new(alias: Option, sub: Option) -> Self { - Self(alias, sub.map(Box::new)) - } - - pub(crate) fn parse(input: &str) -> IResult<&str, Self> { - tuple(( - // The spaces_or_comments separators are necessary here because - // Alias::parse and SubSelection::parse only consume surrounding - // spaces when they match, and they are both optional here. - opt(Alias::parse), - spaces_or_comments, - char('*'), - spaces_or_comments, - opt(SubSelection::parse), - ))(input) - .map(|(remainder, (alias, _, _, _, selection))| { - (remainder, Self(alias, selection.map(Box::new))) - }) +impl ExternalVarPaths for SubSelection { + fn external_var_paths(&self) -> Vec<&PathSelection> { + let mut paths = vec![]; + for selection in &self.selections { + paths.extend(selection.external_var_paths()); + } + paths } } -// Alias ::= Identifier ":" +// Alias ::= Key ":" -#[derive(Debug, PartialEq, Clone, Serialize)] +#[derive(Debug, PartialEq, Eq, Clone)] pub struct Alias { - pub(super) name: String, + pub(super) name: WithRange, + pub(super) range: OffsetRange, +} + +impl Ranged for Alias { + fn range(&self) -> OffsetRange { + self.range.clone() + } } impl Alias { pub fn new(name: &str) -> Self { Self { - name: name.to_string(), + name: WithRange::new(Key::field(name), None), + range: None, } } - fn parse(input: &str) -> IResult<&str, Self> { - tuple(( - spaces_or_comments, - parse_identifier, - spaces_or_comments, - char(':'), - spaces_or_comments, - ))(input) - .map(|(input, (_, name, _, _, _))| (input, Self { name })) + pub fn quoted(name: &str) -> Self { + Self { + name: WithRange::new(Key::quoted(name), None), + range: None, + } + } + + fn parse(input: Span) -> ParseResult { + tuple((Key::parse, spaces_or_comments, ranged_span(":")))(input).map( + |(input, (name, _, colon))| { + let range = 
merge_ranges(name.range(), colon.range()); + (input, Self { name, range }) + }, + ) } pub fn name(&self) -> &str { @@ -465,28 +1061,42 @@ impl Alias { } } -// Key ::= Identifier | StringLiteral +// Key ::= Identifier | LitString -#[derive(Debug, PartialEq, Eq, Hash, Clone, Serialize)] +#[derive(Debug, PartialEq, Eq, Clone, Hash)] pub enum Key { Field(String), Quoted(String), - Index(usize), } impl Key { - fn parse(input: &str) -> IResult<&str, Self> { + pub fn parse(input: Span) -> ParseResult> { alt(( - map(parse_identifier, Self::Field), - map(parse_string_literal, Self::Quoted), + map(parse_identifier, |id| id.take_as(Key::Field)), + map(parse_string_literal, |s| s.take_as(Key::Quoted)), ))(input) } + pub fn field(name: &str) -> Self { + Self::Field(name.to_string()) + } + + pub fn quoted(name: &str) -> Self { + Self::Quoted(name.to_string()) + } + + pub fn into_with_range(self) -> WithRange { + WithRange::new(self, None) + } + + pub fn is_quoted(&self) -> bool { + matches!(self, Self::Quoted(_)) + } + pub fn to_json(&self) -> JSON { match self { Key::Field(name) => JSON::String(name.clone().into()), Key::Quoted(name) => JSON::String(name.clone().into()), - Key::Index(index) => JSON::Number((*index).into()), } } @@ -497,7 +1107,14 @@ impl Key { match self { Key::Field(name) => name.clone(), Key::Quoted(name) => name.clone(), - Key::Index(n) => n.to_string(), + } + } + // Like as_string, but without cloning a new String, for times when the Key + // itself lives longer than the &str. 
+ pub fn as_str(&self) -> &str { + match self { + Key::Field(name) => name.as_str(), + Key::Quoted(name) => name.as_str(), } } @@ -515,7 +1132,6 @@ impl Key { let quoted = serde_json_bytes::Value::String(field.clone().into()).to_string(); format!(".{quoted}") } - Key::Index(index) => format!(".{index}"), } } } @@ -529,33 +1145,37 @@ impl Display for Key { // Identifier ::= [a-zA-Z_] NO_SPACE [0-9a-zA-Z_]* -fn parse_identifier(input: &str) -> IResult<&str, String> { - delimited( - spaces_or_comments, - recognize(pair( - one_of("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"), - many0(one_of( - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789", - )), +fn parse_identifier(input: Span) -> ParseResult> { + preceded(spaces_or_comments, parse_identifier_no_space)(input) +} + +fn parse_identifier_no_space(input: Span) -> ParseResult> { + recognize(pair( + one_of("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"), + many0(one_of( + "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789", )), - spaces_or_comments, - )(input) - .map(|(input, name)| (input, name.to_string())) + ))(input) + .map(|(remainder, name)| { + let range = Some(name.location_offset()..remainder.location_offset()); + (remainder, WithRange::new(name.to_string(), range)) + }) } -// StringLiteral ::= +// LitString ::= // | "'" ("\\'" | [^'])* "'" // | '"' ('\\"' | [^"])* '"' -fn parse_string_literal(input: &str) -> IResult<&str, String> { - let input = spaces_or_comments(input).map(|(input, _)| input)?; +pub(crate) fn parse_string_literal(input: Span) -> ParseResult> { + let input = spaces_or_comments(input)?.0; + let start = input.location_offset(); let mut input_char_indices = input.char_indices(); match input_char_indices.next() { Some((0, quote @ '\'')) | Some((0, quote @ '"')) => { let mut escape_next = false; let mut chars: Vec = vec![]; - let mut remainder: Option<&str> = None; + let mut remainder_opt: Option = None; for (i, c) in input_char_indices { if 
escape_next { @@ -571,411 +1191,489 @@ fn parse_string_literal(input: &str) -> IResult<&str, String> { continue; } if c == quote { - remainder = Some(spaces_or_comments(&input[i + 1..])?.0); + remainder_opt = Some(input.slice(i + 1..)); break; } chars.push(c); } - if let Some(remainder) = remainder { - Ok((remainder, chars.iter().collect::())) + if let Some(remainder) = remainder_opt { + Ok(( + remainder, + WithRange::new( + chars.iter().collect::(), + Some(start..remainder.location_offset()), + ), + )) } else { - Err(nom::Err::Error(nom::error::Error::new( - input, - nom::error::ErrorKind::Eof, - ))) + Err(nom_fail_message(input, "Unterminated string literal")) } } - _ => Err(nom::Err::Error(nom::error::Error::new( - input, - nom::error::ErrorKind::IsNot, - ))), + _ => Err(nom_error_message(input, "Not a string literal")), } } -#[cfg(test)] -mod tests { - use super::*; - use crate::selection; +#[derive(Debug, PartialEq, Eq, Clone, Default)] +pub(super) struct MethodArgs { + pub(super) args: Vec>, + pub(super) range: OffsetRange, +} - #[test] - fn test_identifier() { - assert_eq!(parse_identifier("hello"), Ok(("", "hello".to_string())),); +impl Ranged for MethodArgs { + fn range(&self) -> OffsetRange { + self.range.clone() + } +} - assert_eq!( - parse_identifier("hello_world"), - Ok(("", "hello_world".to_string())), - ); +// Comma-separated positional arguments for a method, surrounded by parentheses. +// When an arrow method is used without arguments, the Option for +// the PathSelection::Method will be None, so we can safely define MethodArgs +// using a Vec in all cases (possibly empty but never missing). 
+impl MethodArgs { + fn parse(input: Span) -> ParseResult { + tuple(( + spaces_or_comments, + ranged_span("("), + spaces_or_comments, + opt(map( + tuple(( + LitExpr::parse, + many0(preceded( + tuple((spaces_or_comments, char(','))), + LitExpr::parse, + )), + opt(tuple((spaces_or_comments, char(',')))), + )), + |(first, rest, _trailing_comma)| { + let mut output = vec![first]; + output.extend(rest); + output + }, + )), + spaces_or_comments, + ranged_span(")"), + ))(input) + .map(|(remainder, (_, open_paren, _, args, _, close_paren))| { + let range = merge_ranges(open_paren.range(), close_paren.range()); + ( + remainder, + Self { + args: args.unwrap_or_default(), + range, + }, + ) + }) + } +} - assert_eq!( - parse_identifier("hello_world_123"), - Ok(("", "hello_world_123".to_string())), - ); +#[cfg(test)] +mod tests { + use apollo_compiler::collections::IndexMap; + use insta::assert_debug_snapshot; + + use super::super::location::strip_ranges::StripRanges; + use super::*; + use crate::selection; + use crate::sources::connect::json_selection::helpers::span_is_all_spaces_or_comments; + use crate::sources::connect::json_selection::location::new_span; + + #[test] + fn test_identifier() { + fn check(input: &str, expected_name: &str) { + let (remainder, name) = parse_identifier(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(name.as_ref(), expected_name); + } + + check("hello", "hello"); + check("hello_world", "hello_world"); + check(" hello_world ", "hello_world"); + check("hello_world_123", "hello_world_123"); + check(" hello ", "hello"); + + fn check_no_space(input: &str, expected_name: &str) { + let name = parse_identifier_no_space(new_span(input)).unwrap().1; + assert_eq!(name.as_ref(), expected_name); + } + + check_no_space("oyez", "oyez"); + check_no_space("oyez ", "oyez"); - assert_eq!(parse_identifier(" hello "), Ok(("", "hello".to_string())),); + { + let identifier_with_leading_space = new_span(" oyez "); + 
assert_eq!( + parse_identifier_no_space(identifier_with_leading_space), + Err(nom::Err::Error(nom::error::Error::from_error_kind( + // The parse_identifier_no_space function does not provide a + // custom error message, since it's only used internally. + // Testing it directly here is somewhat contrived. + identifier_with_leading_space, + nom::error::ErrorKind::OneOf, + ))), + ); + } } #[test] fn test_string_literal() { - assert_eq!( - parse_string_literal("'hello world'"), - Ok(("", "hello world".to_string())), - ); - assert_eq!( - parse_string_literal("\"hello world\""), - Ok(("", "hello world".to_string())), - ); - assert_eq!( - parse_string_literal("'hello \"world\"'"), - Ok(("", "hello \"world\"".to_string())), - ); - assert_eq!( - parse_string_literal("\"hello \\\"world\\\"\""), - Ok(("", "hello \"world\"".to_string())), - ); - assert_eq!( - parse_string_literal("'hello \\'world\\''"), - Ok(("", "hello 'world'".to_string())), - ); + fn check(input: &str, expected: &str) { + let (remainder, lit) = parse_string_literal(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(lit.as_ref(), expected); + } + check("'hello world'", "hello world"); + check("\"hello world\"", "hello world"); + check("'hello \"world\"'", "hello \"world\""); + check("\"hello \\\"world\\\"\"", "hello \"world\""); + check("'hello \\'world\\''", "hello 'world'"); } + #[test] fn test_key() { - assert_eq!( - Key::parse("hello"), - Ok(("", Key::Field("hello".to_string()))), - ); + fn check(input: &str, expected: &Key) { + let (remainder, key) = Key::parse(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(key.as_ref(), expected); + } - assert_eq!( - Key::parse("'hello'"), - Ok(("", Key::Quoted("hello".to_string()))), - ); + check("hello", &Key::field("hello")); + check("'hello'", &Key::quoted("hello")); + check(" hello ", &Key::field("hello")); + check("\"hello\"", &Key::quoted("hello")); + check(" \"hello\" ", 
&Key::quoted("hello")); } #[test] fn test_alias() { - assert_eq!( - Alias::parse("hello:"), - Ok(( - "", - Alias { - name: "hello".to_string(), - }, - )), - ); - - assert_eq!( - Alias::parse("hello :"), - Ok(( - "", - Alias { - name: "hello".to_string(), - }, - )), - ); - - assert_eq!( - Alias::parse("hello : "), - Ok(( - "", - Alias { - name: "hello".to_string(), - }, - )), - ); - - assert_eq!( - Alias::parse(" hello :"), - Ok(( - "", - Alias { - name: "hello".to_string(), - }, - )), - ); + fn check(input: &str, alias: &str) { + let (remainder, parsed) = Alias::parse(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(parsed.name(), alias); + } - assert_eq!( - Alias::parse("hello: "), - Ok(( - "", - Alias { - name: "hello".to_string(), - }, - )), - ); + check("hello:", "hello"); + check("hello :", "hello"); + check("hello : ", "hello"); + check(" hello :", "hello"); + check("hello: ", "hello"); } #[test] fn test_named_selection() { - fn assert_result_and_name(input: &str, expected: NamedSelection, name: &str) { - let actual = NamedSelection::parse(input); - assert_eq!(actual, Ok(("", expected.clone()))); - assert_eq!(actual.unwrap().1.name(), name); + fn assert_result_and_names(input: &str, expected: NamedSelection, names: &[&str]) { + let (remainder, selection) = NamedSelection::parse(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + let selection = selection.strip_ranges(); + assert_eq!(selection, expected); + assert_eq!(selection.names(), names); assert_eq!( - selection!(input), + selection!(input).strip_ranges(), JSONSelection::Named(SubSelection { selections: vec![expected], - star: None, - }), + ..Default::default() + },), ); } - assert_result_and_name( - "hello", - NamedSelection::Field(None, "hello".to_string(), None), + assert_result_and_names( "hello", + NamedSelection::Field(None, Key::field("hello").into_with_range(), None), + &["hello"], ); - assert_result_and_name( + 
assert_result_and_names( "hello { world }", NamedSelection::Field( None, - "hello".to_string(), + Key::field("hello").into_with_range(), Some(SubSelection { - selections: vec![NamedSelection::Field(None, "world".to_string(), None)], - star: None, + selections: vec![NamedSelection::Field( + None, + Key::field("world").into_with_range(), + None, + )], + ..Default::default() }), ), - "hello", + &["hello"], ); - assert_result_and_name( + assert_result_and_names( "hi: hello", NamedSelection::Field( - Some(Alias { - name: "hi".to_string(), - }), - "hello".to_string(), + Some(Alias::new("hi")), + Key::field("hello").into_with_range(), None, ), - "hi", + &["hi"], ); - assert_result_and_name( + assert_result_and_names( "hi: 'hello world'", - NamedSelection::Quoted( - Alias { - name: "hi".to_string(), - }, - "hello world".to_string(), + NamedSelection::Field( + Some(Alias::new("hi")), + Key::quoted("hello world").into_with_range(), None, ), - "hi", + &["hi"], ); - assert_result_and_name( + assert_result_and_names( "hi: hello { world }", NamedSelection::Field( - Some(Alias { - name: "hi".to_string(), - }), - "hello".to_string(), + Some(Alias::new("hi")), + Key::field("hello").into_with_range(), Some(SubSelection { - selections: vec![NamedSelection::Field(None, "world".to_string(), None)], - star: None, + selections: vec![NamedSelection::Field( + None, + Key::field("world").into_with_range(), + None, + )], + ..Default::default() }), ), - "hi", + &["hi"], ); - assert_result_and_name( + assert_result_and_names( "hey: hello { world again }", NamedSelection::Field( - Some(Alias { - name: "hey".to_string(), - }), - "hello".to_string(), + Some(Alias::new("hey")), + Key::field("hello").into_with_range(), Some(SubSelection { selections: vec![ - NamedSelection::Field(None, "world".to_string(), None), - NamedSelection::Field(None, "again".to_string(), None), + NamedSelection::Field(None, Key::field("world").into_with_range(), None), + NamedSelection::Field(None, 
Key::field("again").into_with_range(), None), ], - star: None, + ..Default::default() }), ), - "hey", + &["hey"], ); - assert_result_and_name( + assert_result_and_names( "hey: 'hello world' { again }", - NamedSelection::Quoted( - Alias { - name: "hey".to_string(), - }, - "hello world".to_string(), + NamedSelection::Field( + Some(Alias::new("hey")), + Key::quoted("hello world").into_with_range(), Some(SubSelection { - selections: vec![NamedSelection::Field(None, "again".to_string(), None)], - star: None, + selections: vec![NamedSelection::Field( + None, + Key::field("again").into_with_range(), + None, + )], + ..Default::default() }), ), - "hey", + &["hey"], ); - assert_result_and_name( + assert_result_and_names( "leggo: 'my ego'", - NamedSelection::Quoted( - Alias { - name: "leggo".to_string(), - }, - "my ego".to_string(), + NamedSelection::Field( + Some(Alias::new("leggo")), + Key::quoted("my ego").into_with_range(), + None, + ), + &["leggo"], + ); + + assert_result_and_names( + "'let go': 'my ego'", + NamedSelection::Field( + Some(Alias::quoted("let go")), + Key::quoted("my ego").into_with_range(), None, ), - "leggo", + &["let go"], ); } #[test] fn test_selection() { assert_eq!( - selection!(""), + selection!("").strip_ranges(), JSONSelection::Named(SubSelection { selections: vec![], - star: None, + ..Default::default() }), ); assert_eq!( - selection!(" "), + selection!(" ").strip_ranges(), JSONSelection::Named(SubSelection { selections: vec![], - star: None, + ..Default::default() }), ); assert_eq!( - selection!("hello"), + selection!("hello").strip_ranges(), JSONSelection::Named(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None),], - star: None, + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + None + )], + ..Default::default() }), ); assert_eq!( - selection!(".hello"), - JSONSelection::Path(PathSelection::from_slice( - &[Key::Field("hello".to_string()),], - None - )), + 
selection!("$.hello").strip_ranges(), + JSONSelection::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("hello").into_with_range(), + PathList::Empty.into_with_range() + ) + .into_with_range(), + ) + .into_with_range(), + }), ); { let expected = JSONSelection::Named(SubSelection { - selections: vec![NamedSelection::Path( - Alias { - name: "hi".to_string(), - }, - PathSelection::from_slice( + selections: vec![NamedSelection::Path { + alias: Some(Alias::new("hi")), + inline: false, + path: PathSelection::from_slice( &[ Key::Field("hello".to_string()), Key::Field("world".to_string()), ], None, ), - )], - star: None, + }], + ..Default::default() }); - assert_eq!(selection!("hi: .hello.world"), expected); - assert_eq!(selection!("hi: .hello .world"), expected); - assert_eq!(selection!("hi: . hello. world"), expected); - assert_eq!(selection!("hi: .hello . world"), expected); - assert_eq!(selection!("hi: hello.world"), expected); - assert_eq!(selection!("hi: hello. world"), expected); - assert_eq!(selection!("hi: hello .world"), expected); - assert_eq!(selection!("hi: hello . world"), expected); + assert_eq!(selection!("hi: hello.world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello .world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello. world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello . world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello.world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello. world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello .world").strip_ranges(), expected); + assert_eq!(selection!("hi: hello . 
world ").strip_ranges(), expected); } { let expected = JSONSelection::Named(SubSelection { selections: vec![ - NamedSelection::Field(None, "before".to_string(), None), - NamedSelection::Path( - Alias { - name: "hi".to_string(), - }, - PathSelection::from_slice( + NamedSelection::Field(None, Key::field("before").into_with_range(), None), + NamedSelection::Path { + alias: Some(Alias::new("hi")), + inline: false, + path: PathSelection::from_slice( &[ Key::Field("hello".to_string()), Key::Field("world".to_string()), ], None, ), - ), - NamedSelection::Field(None, "after".to_string(), None), + }, + NamedSelection::Field(None, Key::field("after").into_with_range(), None), ], - star: None, + ..Default::default() }); - assert_eq!(selection!("before hi: .hello.world after"), expected); - assert_eq!(selection!("before hi: .hello .world after"), expected); - assert_eq!(selection!("before hi: .hello. world after"), expected); - assert_eq!(selection!("before hi: .hello . world after"), expected); - assert_eq!(selection!("before hi: . hello.world after"), expected); - assert_eq!(selection!("before hi: . hello .world after"), expected); - assert_eq!(selection!("before hi: . hello. world after"), expected); - assert_eq!(selection!("before hi: . hello . world after"), expected); - assert_eq!(selection!("before hi: hello.world after"), expected); - assert_eq!(selection!("before hi: hello .world after"), expected); - assert_eq!(selection!("before hi: hello. world after"), expected); - assert_eq!(selection!("before hi: hello . world after"), expected); + assert_eq!( + selection!("before hi: hello.world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello .world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello. world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello . 
world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello.world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello .world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello. world after").strip_ranges(), + expected + ); + assert_eq!( + selection!("before hi: hello . world after").strip_ranges(), + expected + ); } { let expected = JSONSelection::Named(SubSelection { selections: vec![ - NamedSelection::Field(None, "before".to_string(), None), - NamedSelection::Path( - Alias { - name: "hi".to_string(), - }, - PathSelection::from_slice( + NamedSelection::Field(None, Key::field("before").into_with_range(), None), + NamedSelection::Path { + alias: Some(Alias::new("hi")), + inline: false, + path: PathSelection::from_slice( &[ Key::Field("hello".to_string()), Key::Field("world".to_string()), ], Some(SubSelection { selections: vec![ - NamedSelection::Field(None, "nested".to_string(), None), - NamedSelection::Field(None, "names".to_string(), None), + NamedSelection::Field( + None, + Key::field("nested").into_with_range(), + None, + ), + NamedSelection::Field( + None, + Key::field("names").into_with_range(), + None, + ), ], - star: None, + ..Default::default() }), ), - ), - NamedSelection::Field(None, "after".to_string(), None), + }, + NamedSelection::Field(None, Key::field("after").into_with_range(), None), ], - star: None, + ..Default::default() }); assert_eq!( - selection!("before hi: .hello.world { nested names } after"), - expected - ); - assert_eq!( - selection!("before hi:.hello.world{nested names}after"), + selection!("before hi: hello.world { nested names } after").strip_ranges(), expected ); assert_eq!( - selection!("before hi: hello.world { nested names } after"), + selection!("before hi:hello.world{nested names}after").strip_ranges(), expected ); assert_eq!( - selection!("before hi:hello.world{nested names}after"), + selection!(" before hi : hello . 
world { nested names } after ").strip_ranges(), expected ); } - assert_eq!( - selection!( - " + assert_debug_snapshot!(selection!( + " # Comments are supported because we parse them as whitespace topLevelAlias: topLevelField { - # Non-identifier properties must be aliased as an identifier - nonIdentifier: 'property name with spaces' + identifier: 'property name with spaces' + 'unaliased non-identifier property' + 'non-identifier alias': identifier # This extracts the value located at the given path and applies a # selection set to it before renaming the result to pathSelection - pathSelection: .some.nested.path { + pathSelection: some.nested.path { still: yet more properties @@ -985,85 +1683,61 @@ mod tests { # under the given alias siblingGroup: { brother sister } }" - ), - JSONSelection::Named(SubSelection { - selections: vec![NamedSelection::Field( - Some(Alias { - name: "topLevelAlias".to_string(), - }), - "topLevelField".to_string(), - Some(SubSelection { - selections: vec![ - NamedSelection::Quoted( - Alias { - name: "nonIdentifier".to_string(), - }, - "property name with spaces".to_string(), - None, - ), - NamedSelection::Path( - Alias { - name: "pathSelection".to_string(), - }, - PathSelection::from_slice( - &[ - Key::Field("some".to_string()), - Key::Field("nested".to_string()), - Key::Field("path".to_string()), - ], - Some(SubSelection { - selections: vec![ - NamedSelection::Field( - Some(Alias { - name: "still".to_string(), - }), - "yet".to_string(), - None, - ), - NamedSelection::Field(None, "more".to_string(), None,), - NamedSelection::Field( - None, - "properties".to_string(), - None, - ), - ], - star: None, - }) - ), - ), - NamedSelection::Group( - Alias { - name: "siblingGroup".to_string(), - }, - SubSelection { - selections: vec![ - NamedSelection::Field(None, "brother".to_string(), None,), - NamedSelection::Field(None, "sister".to_string(), None,), - ], - star: None, - }, - ), - ], - star: None, - }), - )], - star: None, - }), - ); + )); } + 
#[track_caller] fn check_path_selection(input: &str, expected: PathSelection) { - assert_eq!(PathSelection::parse(input), Ok(("", expected.clone()))); - assert_eq!(selection!(input), JSONSelection::Path(expected.clone())); + let (remainder, path_selection) = PathSelection::parse(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(&path_selection.strip_ranges(), &expected); + assert_eq!( + selection!(input).strip_ranges(), + JSONSelection::Path(expected) + ); } #[test] fn test_path_selection() { check_path_selection( - ".hello", - PathSelection::from_slice(&[Key::Field("hello".to_string())], None), + "$.hello", + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("hello").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); + { + let expected = PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("hello").into_with_range(), + PathList::Key( + Key::field("world").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }; + check_path_selection("$.hello.world", expected.clone()); + check_path_selection("$.hello .world", expected.clone()); + check_path_selection("$.hello. world", expected.clone()); + check_path_selection("$.hello . world", expected.clone()); + check_path_selection("$ . hello . world", expected.clone()); + check_path_selection(" $ . hello . world ", expected.clone()); + } + { let expected = PathSelection::from_slice( &[ @@ -1072,14 +1746,11 @@ mod tests { ], None, ); - check_path_selection(".hello.world", expected.clone()); - check_path_selection(".hello .world", expected.clone()); - check_path_selection(".hello. world", expected.clone()); - check_path_selection(".hello . 
world", expected.clone()); check_path_selection("hello.world", expected.clone()); check_path_selection("hello .world", expected.clone()); check_path_selection("hello. world", expected.clone()); check_path_selection("hello . world", expected.clone()); + check_path_selection(" hello . world ", expected.clone()); } { @@ -1089,22 +1760,20 @@ mod tests { Key::Field("world".to_string()), ], Some(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None)], - star: None, + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + None, + )], + ..Default::default() }), ); - check_path_selection(".hello.world { hello }", expected.clone()); - check_path_selection(".hello .world { hello }", expected.clone()); - check_path_selection(".hello. world { hello }", expected.clone()); - check_path_selection(".hello . world { hello }", expected.clone()); - check_path_selection(". hello.world { hello }", expected.clone()); - check_path_selection(". hello .world { hello }", expected.clone()); - check_path_selection(". hello. world { hello }", expected.clone()); - check_path_selection(". hello . world { hello }", expected.clone()); + check_path_selection("hello.world{hello}", expected.clone()); check_path_selection("hello.world { hello }", expected.clone()); check_path_selection("hello .world { hello }", expected.clone()); check_path_selection("hello. world { hello }", expected.clone()); check_path_selection("hello . world { hello }", expected.clone()); + check_path_selection(" hello . world { hello } ", expected.clone()); } { @@ -1117,10 +1786,6 @@ mod tests { ], None, ); - check_path_selection( - ".nested.'string literal'.\"property\".name", - expected.clone(), - ); check_path_selection( "nested.'string literal'.\"property\".name", expected.clone(), @@ -1141,6 +1806,10 @@ mod tests { "nested.'string literal'.\"property\". name", expected.clone(), ); + check_path_selection( + " nested . 'string literal' . \"property\" . 
name ", + expected.clone(), + ); } { @@ -1150,24 +1819,22 @@ mod tests { Key::Quoted("string literal".to_string()), ], Some(SubSelection { - selections: vec![NamedSelection::Quoted( - Alias { - name: "leggo".to_string(), - }, - "my ego".to_string(), + selections: vec![NamedSelection::Field( + Some(Alias::new("leggo")), + Key::quoted("my ego").into_with_range(), None, )], - star: None, + ..Default::default() }), ); check_path_selection( - ".nested.'string literal' { leggo: 'my ego' }", + "nested.'string literal' { leggo: 'my ego' }", expected.clone(), ); check_path_selection( - "nested.'string literal' { leggo: 'my ego' }", + " nested . 'string literal' { leggo : 'my ego' } ", expected.clone(), ); @@ -1180,130 +1847,258 @@ mod tests { "nested . 'string literal' { leggo: 'my ego' }", expected.clone(), ); + check_path_selection( + " nested . \"string literal\" { leggo: 'my ego' } ", + expected.clone(), + ); + } + + { + let expected = PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("results").into_with_range(), + PathList::Selection(SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::quoted("quoted without alias").into_with_range(), + Some(SubSelection { + selections: vec![ + NamedSelection::Field( + None, + Key::field("id").into_with_range(), + None, + ), + NamedSelection::Field( + None, + Key::quoted("n a m e").into_with_range(), + None, + ), + ], + ..Default::default() + }), + )], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }; + check_path_selection( + "$.results{'quoted without alias'{id'n a m e'}}", + expected.clone(), + ); + check_path_selection( + " $ . 
results { 'quoted without alias' { id 'n a m e' } } ", + expected.clone(), + ); + } + + { + let expected = PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("results").into_with_range(), + PathList::Selection(SubSelection { + selections: vec![NamedSelection::Field( + Some(Alias::quoted("non-identifier alias")), + Key::quoted("quoted with alias").into_with_range(), + Some(SubSelection { + selections: vec![ + NamedSelection::Field( + None, + Key::field("id").into_with_range(), + None, + ), + NamedSelection::Field( + Some(Alias::quoted("n a m e")), + Key::field("name").into_with_range(), + None, + ), + ], + ..Default::default() + }), + )], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }; + check_path_selection( + "$.results{'non-identifier alias':'quoted with alias'{id'n a m e':name}}", + expected.clone(), + ); + check_path_selection( + " $ . results { 'non-identifier alias' : 'quoted with alias' { id 'n a m e': name } } ", + expected.clone(), + ); } } #[test] fn test_path_selection_vars() { check_path_selection( - "$var", - PathSelection::Var("$var".to_string(), Box::new(PathSelection::Empty)), + "$this", + PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::This).into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( "$", - PathSelection::Var("$".to_string(), Box::new(PathSelection::Empty)), + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( - "$var { hello }", - PathSelection::Var( - "$var".to_string(), - Box::new(PathSelection::Selection(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None)], - star: None, - })), - ), + "$this { hello }", + PathSelection { + path: PathList::Var( + 
KnownVariable::from(Namespace::This).into_with_range(), + PathList::Selection(SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + None, + )], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( "$ { hello }", - PathSelection::Var( - "$".to_string(), - Box::new(PathSelection::Selection(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None)], - star: None, - })), - ), + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Selection(SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + None, + )], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( - "$var { before alias: $args.arg after }", - PathSelection::Var( - "$var".to_string(), - Box::new(PathSelection::Selection(SubSelection { + "$this { before alias: $args.arg after }", + PathList::Var( + KnownVariable::from(Namespace::This).into_with_range(), + PathList::Selection(SubSelection { selections: vec![ - NamedSelection::Field(None, "before".to_string(), None), - NamedSelection::Path( - Alias { - name: "alias".to_string(), + NamedSelection::Field(None, Key::field("before").into_with_range(), None), + NamedSelection::Path { + alias: Some(Alias::new("alias")), + inline: false, + path: PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::Args).into_with_range(), + PathList::Key( + Key::field("arg").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), }, - PathSelection::Var( - "$args".to_string(), - Box::new(PathSelection::Key( - Key::Field("arg".to_string()), - Box::new(PathSelection::Empty), - )), - ), - ), - NamedSelection::Field(None, "after".to_string(), None), + }, + NamedSelection::Field(None, Key::field("after").into_with_range(), None), ], - star: 
None, - })), - ), + ..Default::default() + }) + .into_with_range(), + ) + .into(), ); check_path_selection( "$.nested { key injected: $args.arg }", - PathSelection::Var( - "$".to_string(), - Box::new(PathSelection::Key( - Key::Field("nested".to_string()), - Box::new(PathSelection::Selection(SubSelection { - selections: vec![ - NamedSelection::Field(None, "key".to_string(), None), - NamedSelection::Path( - Alias { - name: "injected".to_string(), - }, - PathSelection::Var( - "$args".to_string(), - Box::new(PathSelection::Key( - Key::Field("arg".to_string()), - Box::new(PathSelection::Empty), - )), + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("nested").into_with_range(), + PathList::Selection(SubSelection { + selections: vec![ + NamedSelection::Field( + None, + Key::field("key").into_with_range(), + None, ), - ), - ], - star: None, - })), - )), - ), + NamedSelection::Path { + alias: Some(Alias::new("injected")), + inline: false, + path: PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::Args).into_with_range(), + PathList::Key( + Key::field("arg").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + }, + ], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( - "$root.a.b.c", - PathSelection::Var( - "$root".to_string(), - Box::new(PathSelection::from_slice( - &[ - Key::Field("a".to_string()), - Key::Field("b".to_string()), - Key::Field("c".to_string()), - ], - None, - )), - ), + "$args.a.b.c", + PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::Args).into_with_range(), + PathList::from_slice( + &[ + Key::Field("a".to_string()), + Key::Field("b".to_string()), + Key::Field("c".to_string()), + ], + None, + ) + .into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( - "undotted.x.y.z", + "root.x.y.z", 
PathSelection::from_slice( &[ - Key::Field("undotted".to_string()), - Key::Field("x".to_string()), - Key::Field("y".to_string()), - Key::Field("z".to_string()), - ], - None, - ), - ); - - check_path_selection( - ".dotted.x.y.z", - PathSelection::from_slice( - &[ - Key::Field("dotted".to_string()), + Key::Field("root".to_string()), Key::Field("x".to_string()), Key::Field("y".to_string()), Key::Field("z".to_string()), @@ -1314,290 +2109,1056 @@ mod tests { check_path_selection( "$.data", - PathSelection::Var( - "$".to_string(), - Box::new(PathSelection::Key( - Key::Field("data".to_string()), - Box::new(PathSelection::Empty), - )), - ), + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("data").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); check_path_selection( "$.data.'quoted property'.nested", - PathSelection::Var( - "$".to_string(), - Box::new(PathSelection::Key( - Key::Field("data".to_string()), - Box::new(PathSelection::Key( - Key::Quoted("quoted property".to_string()), - Box::new(PathSelection::Key( - Key::Field("nested".to_string()), - Box::new(PathSelection::Empty), - )), - )), - )), - ), + PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field("data").into_with_range(), + PathList::Key( + Key::quoted("quoted property").into_with_range(), + PathList::Key( + Key::field("nested").into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); - assert_eq!( - PathSelection::parse("naked"), - Err(nom::Err::Error(nom::error::Error::new( - "", - nom::error::ErrorKind::IsNot, - ))), + #[track_caller] + fn check_path_parse_error(input: &str, expected_offset: usize, expected_message: &str) { + match PathSelection::parse(new_span(input)) { + Ok((remainder, path)) => { + panic!( + 
"Expected error at offset {} with message '{}', but got path {:?} and remainder {:?}", + expected_offset, + expected_message, + path, + remainder, + ); + } + Err(nom::Err::Error(e) | nom::Err::Failure(e)) => { + assert_eq!(&input[expected_offset..], *e.input.fragment()); + // The PartialEq implementation for LocatedSpan + // unfortunately ignores span.extra, so we have to check + // e.input.extra manually. + assert_eq!(e.input.extra, Some(expected_message)); + } + Err(e) => { + panic!("Unexpected error {:?}", e); + } + } + } + + let single_key_path_error_message = + "Single-key path must be prefixed with $. to avoid ambiguity with field name"; + check_path_parse_error( + new_span("naked").fragment(), + 0, + single_key_path_error_message, + ); + check_path_parse_error( + new_span("naked { hi }").fragment(), + 0, + single_key_path_error_message, + ); + check_path_parse_error( + new_span(" naked { hi }").fragment(), + 2, + single_key_path_error_message, ); - assert_eq!( - PathSelection::parse("naked { hi }"), - Err(nom::Err::Error(nom::error::Error::new( - "", - nom::error::ErrorKind::IsNot, - ))), + let path_key_ambiguity_error_message = + "Path selection . must be followed by key (identifier or quoted string literal)"; + check_path_parse_error( + new_span("valid.$invalid").fragment(), + 5, + path_key_ambiguity_error_message, + ); + check_path_parse_error( + new_span(" valid.$invalid").fragment(), + 7, + path_key_ambiguity_error_message, + ); + check_path_parse_error( + new_span(" valid . 
$invalid").fragment(), + 8, + path_key_ambiguity_error_message, ); assert_eq!( - PathSelection::parse("valid.$invalid"), - Err(nom::Err::Error(nom::error::Error::new( - ".$invalid", - nom::error::ErrorKind::IsNot, - ))), + selection!("$").strip_ranges(), + JSONSelection::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Empty.into_with_range() + ) + .into_with_range(), + }), ); assert_eq!( - selection!("$"), - JSONSelection::Path(PathSelection::Var( - "$".to_string(), - Box::new(PathSelection::Empty), - )), + selection!("$this").strip_ranges(), + JSONSelection::Path(PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::This).into_with_range(), + PathList::Empty.into_with_range() + ) + .into_with_range(), + }), ); assert_eq!( - selection!("$this"), - JSONSelection::Path(PathSelection::Var( - "$this".to_string(), - Box::new(PathSelection::Empty), - )), + selection!("value: $ a { b c }").strip_ranges(), + JSONSelection::Named(SubSelection { + selections: vec![ + NamedSelection::Path { + alias: Some(Alias::new("value")), + inline: false, + path: PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Empty.into_with_range() + ) + .into_with_range(), + }, + }, + NamedSelection::Field( + None, + Key::field("a").into_with_range(), + Some(SubSelection { + selections: vec![ + NamedSelection::Field( + None, + Key::field("b").into_with_range(), + None + ), + NamedSelection::Field( + None, + Key::field("c").into_with_range(), + None + ), + ], + ..Default::default() + }), + ), + ], + ..Default::default() + }), + ); + assert_eq!( + selection!("value: $this { b c }").strip_ranges(), + JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Path { + alias: Some(Alias::new("value")), + inline: false, + path: PathSelection { + path: PathList::Var( + KnownVariable::from(Namespace::This).into_with_range(), + PathList::Selection(SubSelection { + selections: vec![ + 
NamedSelection::Field( + None, + Key::field("b").into_with_range(), + None + ), + NamedSelection::Field( + None, + Key::field("c").into_with_range(), + None + ), + ], + ..Default::default() + }) + .into_with_range(), + ) + .into_with_range(), + }, + }], + ..Default::default() + }), ); } #[test] - fn test_subselection() { - assert_eq!( - SubSelection::parse(" { \n } "), - Ok(( - "", - SubSelection { - selections: vec![], - star: None, - }, - )), + fn test_error_snapshots() { + // The .data shorthand is no longer allowed, since it can be mistakenly + // parsed as a continuation of a previous selection. Instead, use $.data + // to achieve the same effect without ambiguity. + assert_debug_snapshot!(JSONSelection::parse(".data")); + + // We statically verify that all variables are KnownVariables, and + // $bogus is not one of them. + assert_debug_snapshot!(JSONSelection::parse("$bogus")); + + // If you want to mix a path selection with other named selections, the + // path selection must have a trailing subselection, to enforce that it + // returns an object with statically known keys. 
+ assert_debug_snapshot!(JSONSelection::parse("id $.object")); + } + + #[test] + fn test_path_selection_at() { + check_path_selection( + "@", + PathSelection { + path: PathList::Var( + KnownVariable::AtSign.into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + }, ); - assert_eq!( - SubSelection::parse("{hello}"), - Ok(( - "", - SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None),], - star: None, - }, - )), + check_path_selection( + "@.a.b.c", + PathSelection { + path: PathList::Var( + KnownVariable::AtSign.into_with_range(), + PathList::from_slice( + &[ + Key::Field("a".to_string()), + Key::Field("b".to_string()), + Key::Field("c".to_string()), + ], + None, + ) + .into_with_range(), + ) + .into_with_range(), + }, ); - assert_eq!( - SubSelection::parse("{ hello }"), - Ok(( - "", - SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None),], - star: None, - }, - )), + check_path_selection( + "@.items->first", + PathSelection { + path: PathList::Var( + KnownVariable::AtSign.into_with_range(), + PathList::Key( + Key::field("items").into_with_range(), + PathList::Method( + WithRange::new("first".to_string(), None), + None, + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); + } - assert_eq!( - SubSelection::parse(" { padded } "), - Ok(( - "", - SubSelection { - selections: vec![NamedSelection::Field(None, "padded".to_string(), None),], - star: None, + #[test] + fn test_expr_path_selections() { + fn check_simple_lit_expr(input: &str, expected: LitExpr) { + check_path_selection( + input, + PathSelection { + path: PathList::Expr( + expected.into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), }, - )), + ); + } + + check_simple_lit_expr("$(null)", LitExpr::Null); + + check_simple_lit_expr("$(true)", LitExpr::Bool(true)); + check_simple_lit_expr("$(false)", LitExpr::Bool(false)); 
+ + check_simple_lit_expr( + "$(1234)", + LitExpr::Number("1234".parse().expect("serde_json::Number parse error")), + ); + check_simple_lit_expr( + "$(1234.5678)", + LitExpr::Number("1234.5678".parse().expect("serde_json::Number parse error")), ); - assert_eq!( - SubSelection::parse("{ hello world }"), - Ok(( - "", - SubSelection { - selections: vec![ - NamedSelection::Field(None, "hello".to_string(), None), - NamedSelection::Field(None, "world".to_string(), None), - ], - star: None, - }, - )), + check_simple_lit_expr( + "$('hello world')", + LitExpr::String("hello world".to_string()), + ); + check_simple_lit_expr( + "$(\"hello world\")", + LitExpr::String("hello world".to_string()), + ); + check_simple_lit_expr( + "$(\"hello \\\"world\\\"\")", + LitExpr::String("hello \"world\"".to_string()), ); - assert_eq!( - SubSelection::parse("{ hello { world } }"), - Ok(( - "", - SubSelection { - selections: vec![NamedSelection::Field( - None, - "hello".to_string(), - Some(SubSelection { - selections: vec![NamedSelection::Field( - None, - "world".to_string(), - None - ),], - star: None, - }) - ),], - star: None, - }, - )), + check_simple_lit_expr( + "$([1, 2, 3])", + LitExpr::Array( + vec!["1".parse(), "2".parse(), "3".parse()] + .into_iter() + .map(|n| { + LitExpr::Number(n.expect("serde_json::Number parse error")) + .into_with_range() + }) + .collect(), + ), + ); + + check_simple_lit_expr("$({})", LitExpr::Object(IndexMap::default())); + check_simple_lit_expr( + "$({ a: 1, b: 2, c: 3 })", + LitExpr::Object({ + let mut map = IndexMap::default(); + for (key, value) in &[("a", "1"), ("b", "2"), ("c", "3")] { + map.insert( + Key::field(key).into_with_range(), + LitExpr::Number(value.parse().expect("serde_json::Number parse error")) + .into_with_range(), + ); + } + map + }), + ); + + assert_debug_snapshot!( + // Using extra spaces here to make sure the ranges don't + // accidentally include leading/trailing spaces. 
+ selection!(" suffix : results -> slice ( $( - 1 ) -> mul ( $args . suffixLength ) ) ") ); } #[test] - fn test_star_selection() { - assert_eq!( - StarSelection::parse("rest: *"), - Ok(( - "", - StarSelection( - Some(Alias { - name: "rest".to_string(), + fn test_path_methods() { + check_path_selection( + "data.x->or(data.y)", + PathSelection { + path: PathList::Key( + Key::field("data").into_with_range(), + PathList::Key( + Key::field("x").into_with_range(), + PathList::Method( + WithRange::new("or".to_string(), None), + Some(MethodArgs { + args: vec![LitExpr::Path(PathSelection::from_slice( + &[Key::field("data"), Key::field("y")], + None, + )) + .into_with_range()], + ..Default::default() + }), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + ); + + { + fn make_dollar_key_expr(key: &str) -> WithRange { + WithRange::new( + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Key( + Key::field(key).into_with_range(), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), }), - None - ), - )), + None, + ) + } + + let expected = PathSelection { + path: PathList::Key( + Key::field("data").into_with_range(), + PathList::Method( + WithRange::new("query".to_string(), None), + Some(MethodArgs { + args: vec![ + make_dollar_key_expr("a"), + make_dollar_key_expr("b"), + make_dollar_key_expr("c"), + ], + ..Default::default() + }), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }; + check_path_selection("data->query($.a, $.b, $.c)", expected.clone()); + check_path_selection("data->query($.a, $.b, $.c )", expected.clone()); + check_path_selection("data->query($.a, $.b, $.c,)", expected.clone()); + check_path_selection("data->query($.a, $.b, $.c ,)", expected.clone()); + check_path_selection("data->query($.a, $.b, $.c , )", expected.clone()); + } + + { + let expected = 
PathSelection { + path: PathList::Key( + Key::field("data").into_with_range(), + PathList::Key( + Key::field("x").into_with_range(), + PathList::Method( + WithRange::new("concat".to_string(), None), + Some(MethodArgs { + args: vec![LitExpr::Array(vec![ + LitExpr::Path(PathSelection::from_slice( + &[Key::field("data"), Key::field("y")], + None, + )) + .into_with_range(), + LitExpr::Path(PathSelection::from_slice( + &[Key::field("data"), Key::field("z")], + None, + )) + .into_with_range(), + ]) + .into_with_range()], + ..Default::default() + }), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }; + check_path_selection("data.x->concat([data.y, data.z])", expected.clone()); + check_path_selection("data.x->concat([ data.y, data.z ])", expected.clone()); + check_path_selection("data.x->concat([data.y, data.z,])", expected.clone()); + check_path_selection("data.x->concat([data.y, data.z , ])", expected.clone()); + check_path_selection("data.x->concat([data.y, data.z,],)", expected.clone()); + check_path_selection("data.x->concat([data.y, data.z , ] , )", expected.clone()); + } + + check_path_selection( + "data->method([$ { x2: x->times(2) }, $ { y2: y->times(2) }])", + PathSelection { + path: PathList::Key( + Key::field("data").into_with_range(), + PathList::Method( + WithRange::new("method".to_string(), None), + Some(MethodArgs { + args: vec![LitExpr::Array(vec![ + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Selection( + SubSelection { + selections: vec![NamedSelection::Path { + alias: Some(Alias::new("x2")), + inline: false, + path: PathSelection { + path: PathList::Key( + Key::field("x").into_with_range(), + PathList::Method( + WithRange::new( + "times".to_string(), + None, + ), + Some(MethodArgs { + args: vec![LitExpr::Number( + "2".parse().expect( + "serde_json::Number parse error", + ), + ).into_with_range()], + ..Default::default() + 
}), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + }], + ..Default::default() + }, + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + LitExpr::Path(PathSelection { + path: PathList::Var( + KnownVariable::Dollar.into_with_range(), + PathList::Selection( + SubSelection { + selections: vec![NamedSelection::Path { + alias: Some(Alias::new("y2")), + inline: false, + path: PathSelection { + path: PathList::Key( + Key::field("y").into_with_range(), + PathList::Method( + WithRange::new( + "times".to_string(), + None, + ), + Some( + MethodArgs { + args: vec![LitExpr::Number( + "2".parse().expect( + "serde_json::Number parse error", + ), + ).into_with_range()], + ..Default::default() + }, + ), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, + }], + ..Default::default() + }, + ) + .into_with_range(), + ) + .into_with_range(), + }) + .into_with_range(), + ]) + .into_with_range()], + ..Default::default() + }), + PathList::Empty.into_with_range(), + ) + .into_with_range(), + ) + .into_with_range(), + }, ); + } - assert_eq!( - StarSelection::parse("*"), - Ok(("", StarSelection(None, None),)), + #[test] + fn test_path_with_subselection() { + assert_debug_snapshot!(selection!( + r#" + choices->first.message { content role } + "# + )); + + assert_debug_snapshot!(selection!( + r#" + id + created + choices->first.message { content role } + model + "# + )); + + assert_debug_snapshot!(selection!( + r#" + id + created + choices->first.message { content role } + model + choices->last.message { lastContent: content } + "# + )); + + assert_debug_snapshot!(JSONSelection::parse( + r#" + id + created + choices->first.message + model + "# + )); + + assert_debug_snapshot!(JSONSelection::parse( + r#" + id: $this.id + $args.input { + title + body + } + "# + )); + + // Like the selection above, this selection produces an output shape + // with id, title, and body all 
flattened in a top-level object. + assert_debug_snapshot!(JSONSelection::parse( + r#" + $this { id } + $args { $.input { title body } } + "# + )); + + assert_debug_snapshot!(JSONSelection::parse( + r#" + # Equivalent to id: $this.id + $this { id } + + $args { + __typename: $("Args") + + # Using $. instead of just . prevents .input from + # parsing as a key applied to the $("Args") string. + $.input { title body } + + extra + } + + from: $.from + "# + )); + } + + #[test] + fn test_subselection() { + fn check_parsed(input: &str, expected: SubSelection) { + let (remainder, parsed) = SubSelection::parse(new_span(input)).unwrap(); + assert!(span_is_all_spaces_or_comments(remainder)); + assert_eq!(parsed.strip_ranges(), expected); + } + + check_parsed( + " { \n } ", + SubSelection { + selections: vec![], + ..Default::default() + }, ); - assert_eq!( - StarSelection::parse(" * "), - Ok(("", StarSelection(None, None),)), + check_parsed( + "{hello}", + SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + None, + )], + ..Default::default() + }, ); - assert_eq!( - StarSelection::parse(" * { hello } "), - Ok(( - "", - StarSelection( + check_parsed( + "{ hello }", + SubSelection { + selections: vec![NamedSelection::Field( None, - Some(Box::new(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None),], - star: None, - })) - ), - )), + Key::field("hello").into_with_range(), + None, + )], + ..Default::default() + }, ); - assert_eq!( - StarSelection::parse("hi: * { hello }"), - Ok(( - "", - StarSelection( - Some(Alias { - name: "hi".to_string(), - }), - Some(Box::new(SubSelection { - selections: vec![NamedSelection::Field(None, "hello".to_string(), None),], - star: None, - })) - ), - )), + check_parsed( + " { padded } ", + SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("padded").into_with_range(), + None, + )], + ..Default::default() + }, ); - assert_eq!( - 
StarSelection::parse("alias: * { x y z rest: * }"), - Ok(( - "", - StarSelection( - Some(Alias { - name: "alias".to_string() + check_parsed( + "{ hello world }", + SubSelection { + selections: vec![ + NamedSelection::Field(None, Key::field("hello").into_with_range(), None), + NamedSelection::Field(None, Key::field("world").into_with_range(), None), + ], + ..Default::default() + }, + ); + + check_parsed( + "{ hello { world } }", + SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("hello").into_with_range(), + Some(SubSelection { + selections: vec![NamedSelection::Field( + None, + Key::field("world").into_with_range(), + None, + )], + ..Default::default() }), - Some(Box::new(SubSelection { + )], + ..Default::default() + }, + ); + } + + #[test] + fn test_external_var_paths() { + fn parse(input: &str) -> PathSelection { + PathSelection::parse(new_span(input)) + .unwrap() + .1 + .strip_ranges() + } + + { + let sel = selection!( + r#" + $->echo([$args.arg1, $args.arg2, @.items->first]) + "# + ) + .strip_ranges(); + let args_arg1_path = parse("$args.arg1"); + let args_arg2_path = parse("$args.arg2"); + assert_eq!( + sel.external_var_paths(), + vec![&args_arg1_path, &args_arg2_path] + ); + } + { + let sel = selection!( + r#" + $this.kind->match( + ["A", $this.a], + ["B", $this.b], + ["C", $this.c], + [@, @->to_lower_case], + ) + "# + ) + .strip_ranges(); + let this_kind_path = match &sel { + JSONSelection::Path(path) => path, + _ => panic!("Expected PathSelection"), + }; + let this_a_path = parse("$this.a"); + let this_b_path = parse("$this.b"); + let this_c_path = parse("$this.c"); + assert_eq!( + sel.external_var_paths(), + vec![this_kind_path, &this_a_path, &this_b_path, &this_c_path,] + ); + } + { + let sel = selection!( + r#" + data.results->slice($args.start, $args.end) { + id + __typename: $args.type + } + "# + ) + .strip_ranges(); + let start_path = parse("$args.start"); + let end_path = parse("$args.end"); + let args_type_path = 
parse("$args.type"); + assert_eq!( + sel.external_var_paths(), + vec![&start_path, &end_path, &args_type_path] + ); + } + } + + #[test] + fn test_ranged_locations() { + fn check(input: &str, expected: JSONSelection) { + let parsed = JSONSelection::parse(input).unwrap(); + assert_eq!(parsed, expected); + } + + check( + "hello", + JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Field( + None, + WithRange::new(Key::field("hello"), Some(0..5)), + None, + )], + range: Some(0..5), + }), + ); + + check( + " hello ", + JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Field( + None, + WithRange::new(Key::field("hello"), Some(2..7)), + None, + )], + range: Some(2..7), + }), + ); + + check( + " hello { hi name }", + JSONSelection::Named(SubSelection { + selections: vec![NamedSelection::Field( + None, + WithRange::new(Key::field("hello"), Some(2..7)), + Some(SubSelection { selections: vec![ - NamedSelection::Field(None, "x".to_string(), None), - NamedSelection::Field(None, "y".to_string(), None), - NamedSelection::Field(None, "z".to_string(), None), + NamedSelection::Field( + None, + WithRange::new(Key::field("hi"), Some(11..13)), + None, + ), + NamedSelection::Field( + None, + WithRange::new(Key::field("name"), Some(14..18)), + None, + ), ], - star: Some(StarSelection( - Some(Alias { - name: "rest".to_string(), - }), - None - )), - })), + range: Some(9..20), + }), + )], + range: Some(2..20), + }), + ); + + check( + "$args.product.id", + JSONSelection::Path(PathSelection { + path: WithRange::new( + PathList::Var( + WithRange::new(KnownVariable::from(Namespace::Args), Some(0..5)), + WithRange::new( + PathList::Key( + WithRange::new(Key::field("product"), Some(6..13)), + WithRange::new( + PathList::Key( + WithRange::new(Key::field("id"), Some(14..16)), + WithRange::new(PathList::Empty, Some(16..16)), + ), + Some(13..16), + ), + ), + Some(5..16), + ), + ), + Some(0..16), ), - )), + }), ); - assert_eq!( - selection!(" before alias: * { 
* { a b c } } "), - JSONSelection::Named(SubSelection { - selections: vec![NamedSelection::Field(None, "before".to_string(), None),], - star: Some(StarSelection( - Some(Alias { - name: "alias".to_string() - }), - Some(Box::new(SubSelection { - selections: vec![], - star: Some(StarSelection( - None, - Some(Box::new(SubSelection { - selections: vec![ - NamedSelection::Field(None, "a".to_string(), None), - NamedSelection::Field(None, "b".to_string(), None), - NamedSelection::Field(None, "c".to_string(), None), - ], - star: None, - })) - )), - })), - )), + check( + " $args . product . id ", + JSONSelection::Path(PathSelection { + path: WithRange::new( + PathList::Var( + WithRange::new(KnownVariable::from(Namespace::Args), Some(1..6)), + WithRange::new( + PathList::Key( + WithRange::new(Key::field("product"), Some(9..16)), + WithRange::new( + PathList::Key( + WithRange::new(Key::field("id"), Some(19..21)), + WithRange::new(PathList::Empty, Some(21..21)), + ), + Some(17..21), + ), + ), + Some(7..21), + ), + ), + Some(1..21), + ), }), ); - assert_eq!( - selection!(" before group: { * { a b c } } after "), + check( + "before product:$args.product{id name}after", JSONSelection::Named(SubSelection { selections: vec![ - NamedSelection::Field(None, "before".to_string(), None), - NamedSelection::Group( - Alias { - name: "group".to_string(), - }, - SubSelection { - selections: vec![], - star: Some(StarSelection( - None, - Some(Box::new(SubSelection { - selections: vec![ - NamedSelection::Field(None, "a".to_string(), None), - NamedSelection::Field(None, "b".to_string(), None), - NamedSelection::Field(None, "c".to_string(), None), - ], - star: None, - })) - )), + NamedSelection::Field( + None, + WithRange::new(Key::field("before"), Some(0..6)), + None, + ), + NamedSelection::Path { + alias: Some(Alias { + name: WithRange::new(Key::field("product"), Some(7..14)), + range: Some(7..15), + }), + inline: false, + path: PathSelection { + path: WithRange::new( + PathList::Var( + 
WithRange::new( + KnownVariable::from(Namespace::Args), + Some(15..20), + ), + WithRange::new( + PathList::Key( + WithRange::new(Key::field("product"), Some(21..28)), + WithRange::new( + PathList::Selection(SubSelection { + selections: vec![ + NamedSelection::Field( + None, + WithRange::new( + Key::field("id"), + Some(29..31), + ), + None, + ), + NamedSelection::Field( + None, + WithRange::new( + Key::field("name"), + Some(32..36), + ), + None, + ), + ], + range: Some(28..37), + }), + Some(28..37), + ), + ), + Some(20..37), + ), + ), + Some(15..37), + ), }, + }, + NamedSelection::Field( + None, + WithRange::new(Key::field("after"), Some(37..42)), + None, ), - NamedSelection::Field(None, "after".to_string(), None), ], - star: None, + range: Some(0..42), }), ); } + + #[test] + fn test_variable_reference_no_path() { + let selection = JSONSelection::parse("$this").unwrap(); + let var_paths = selection.external_var_paths(); + assert_eq!(var_paths.len(), 1); + assert_eq!( + var_paths[0].variable_reference(), + Some(VariableReference { + namespace: VariableNamespace { + namespace: Namespace::This, + location: 0..5 + }, + path: vec![], + location: 0..5, + }) + ); + } + + #[test] + fn test_variable_reference_with_path() { + let selection = JSONSelection::parse("$this.a.b.c").unwrap(); + let var_paths = selection.external_var_paths(); + assert_eq!(var_paths.len(), 1); + assert_eq!( + var_paths[0].variable_reference(), + Some(VariableReference { + namespace: VariableNamespace { + namespace: Namespace::This, + location: 0..5 + }, + path: vec![ + VariablePathPart { + part: "a", + location: 6..7, + }, + VariablePathPart { + part: "b", + location: 8..9, + }, + VariablePathPart { + part: "c", + location: 10..11, + }, + ], + location: 0..11, + }) + ); + } + + #[test] + fn test_variable_reference_nested() { + let selection = JSONSelection::parse("a b { c: $this.x.y.z { d } }").unwrap(); + let var_paths = selection.external_var_paths(); + assert_eq!(var_paths.len(), 1); + assert_eq!( 
+ var_paths[0].variable_reference(), + Some(VariableReference { + namespace: VariableNamespace { + namespace: Namespace::This, + location: 9..14 + }, + path: vec![ + VariablePathPart { + part: "x", + location: 15..16, + }, + VariablePathPart { + part: "y", + location: 17..18, + }, + VariablePathPart { + part: "z", + location: 19..20, + }, + ], + location: 9..20, + }) + ); + } + + #[test] + fn test_external_var_paths_no_variable() { + let selection = JSONSelection::parse("a.b.c").unwrap(); + let var_paths = selection.external_var_paths(); + assert_eq!(var_paths.len(), 0); + } + + #[test] + fn test_parse_unknown_variable() { + assert_eq!( + JSONSelection::parse("a b { c: $foobar.x.y.z { d } }"), + Err(JSONSelectionParseError { + message: "Unknown variable".to_string(), + fragment: "$foobar.x.y.z { d } }".to_string(), + offset: 9, + }) + ); + } } diff --git a/apollo-federation/src/sources/connect/json_selection/pretty.rs b/apollo-federation/src/sources/connect/json_selection/pretty.rs index f6890d635f..5ded294700 100644 --- a/apollo-federation/src/sources/connect/json_selection/pretty.rs +++ b/apollo-federation/src/sources/connect/json_selection/pretty.rs @@ -5,17 +5,29 @@ //! pretty printing trait which is then implemented on the various sub types //! of the JSONSelection tree. 
+use itertools::Itertools; + +use super::lit_expr::LitExpr; +use super::parser::Alias; +use super::parser::Key; use crate::sources::connect::json_selection::JSONSelection; +use crate::sources::connect::json_selection::MethodArgs; use crate::sources::connect::json_selection::NamedSelection; +use crate::sources::connect::json_selection::PathList; use crate::sources::connect::json_selection::PathSelection; -use crate::sources::connect::json_selection::StarSelection; use crate::sources::connect::json_selection::SubSelection; +impl std::fmt::Display for JSONSelection { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(&self.pretty_print()) + } +} + /// Pretty print trait /// /// This trait marks a type as supporting pretty printing itself outside of a /// Display implementation, which might be more useful for snapshots. -pub trait PrettyPrintable { +pub(crate) trait PrettyPrintable { /// Pretty print the struct fn pretty_print(&self) -> String { self.pretty_print_with_indentation(true, 0) @@ -35,20 +47,10 @@ fn indent_chars(indent: usize) -> String { impl PrettyPrintable for JSONSelection { fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { - let mut result = String::new(); - match self { - JSONSelection::Named(named) => { - let named = named.pretty_print_with_indentation(inline, indentation); - result.push_str(named.as_str()); - } - JSONSelection::Path(path) => { - let path = path.pretty_print_with_indentation(inline, indentation); - result.push_str(path.as_str()); - } - }; - - result + JSONSelection::Named(named) => named.print_subselections(indentation), + JSONSelection::Path(path) => path.pretty_print_with_indentation(inline, indentation), + } } } @@ -63,18 +65,9 @@ impl PrettyPrintable for SubSelection { result.push_str("{\n"); - for selection in &self.selections { - let selection = selection.pretty_print_with_indentation(false, indentation + 1); - result.push_str(selection.as_str()); - 
result.push('\n'); - } - - if let Some(star) = self.star.as_ref() { - let star = star.pretty_print_with_indentation(false, indentation + 1); - result.push_str(star.as_str()); - result.push('\n'); - } + result.push_str(&self.print_subselections(indentation + 1)); + result.push('\n'); result.push_str(indent.as_str()); result.push('}'); @@ -82,7 +75,38 @@ impl PrettyPrintable for SubSelection { } } +impl SubSelection { + /// Prints all of the selections in a subselection + fn print_subselections(&self, indentation: usize) -> String { + self.selections + .iter() + .map(|s| s.pretty_print_with_indentation(false, indentation)) + .join("\n") + } +} + impl PrettyPrintable for PathSelection { + fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { + let inner = self.path.pretty_print_with_indentation(inline, indentation); + // Because we can't tell where PathList::Key elements appear in the path + // once we're inside PathList::pretty_print_with_indentation, we print + // all PathList::Key elements with a leading '.' character, but we + // remove the initial '.' if the path has more than one element, because + // then the leading '.' is not necessary to disambiguate the key from a + // field. To complicate matters further, inner may begin with spaces due + // to indentation. + let leading_space_count = inner.chars().take_while(|c| *c == ' ').count(); + let suffix = inner[leading_space_count..].to_string(); + if suffix.starts_with('.') && !self.path.is_single_key() { + // Strip the '.' but keep any leading spaces. 
+ format!("{}{}", " ".repeat(leading_space_count), &suffix[1..]) + } else { + inner + } + } +} + +impl PrettyPrintable for PathList { fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { let mut result = String::new(); @@ -91,29 +115,54 @@ impl PrettyPrintable for PathSelection { } match self { - PathSelection::Var(var, path) => { - let rest = path.pretty_print_with_indentation(true, indentation); + Self::Var(var, tail) => { + let rest = tail.pretty_print_with_indentation(true, indentation); result.push_str(var.as_str()); result.push_str(rest.as_str()); } - PathSelection::Key(key, path) => { - let rest = path.pretty_print_with_indentation(true, indentation); - result.push_str(key.dotted().as_str()); + Self::Key(key, tail) => { + result.push('.'); + result.push_str(key.pretty_print().as_str()); + let rest = tail.pretty_print_with_indentation(true, indentation); + result.push_str(rest.as_str()); + } + Self::Expr(expr, tail) => { + let rest = tail.pretty_print_with_indentation(true, indentation); + result.push_str("$("); + result.push_str( + expr.pretty_print_with_indentation(true, indentation) + .as_str(), + ); + result.push(')'); result.push_str(rest.as_str()); } - PathSelection::Selection(sub) => { + Self::Method(method, args, tail) => { + result.push_str("->"); + result.push_str(method.as_str()); + if let Some(args) = args { + result.push_str( + args.pretty_print_with_indentation(true, indentation) + .as_str(), + ); + } + result.push_str( + tail.pretty_print_with_indentation(true, indentation) + .as_str(), + ); + } + Self::Selection(sub) => { let sub = sub.pretty_print_with_indentation(true, indentation); result.push(' '); result.push_str(sub.as_str()); } - PathSelection::Empty => {} + Self::Empty => {} } result } } -impl PrettyPrintable for NamedSelection { +impl PrettyPrintable for MethodArgs { fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { let mut result = String::new(); @@ -121,28 +170,122 
@@ impl PrettyPrintable for NamedSelection { result.push_str(indent_chars(indentation).as_str()); } + result.push('('); + + // TODO Break long argument lists across multiple lines, with indentation? + for (i, arg) in self.args.iter().enumerate() { + if i > 0 { + result.push_str(", "); + } + result.push_str( + arg.pretty_print_with_indentation(true, indentation) + .as_str(), + ); + } + + result.push(')'); + + result + } +} + +impl PrettyPrintable for LitExpr { + fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { + let mut result = String::new(); + if !inline { + result.push_str(indent_chars(indentation).as_str()); + } + match self { - NamedSelection::Field(alias, field_name, sub) => { - if let Some(alias) = alias { - result.push_str(alias.name.as_str()); + Self::String(s) => { + let safely_quoted = serde_json_bytes::Value::String(s.clone().into()).to_string(); + result.push_str(safely_quoted.as_str()); + } + Self::Number(n) => result.push_str(n.to_string().as_str()), + Self::Bool(b) => result.push_str(b.to_string().as_str()), + Self::Null => result.push_str("null"), + Self::Object(map) => { + result.push('{'); + + if !map.is_empty() { + if inline { + result.push(' '); + } else { + result.push('\n'); + } + } + + let mut is_first = true; + for (key, value) in map { + if is_first { + is_first = false; + } else if inline { + result.push_str(", "); + } else { + result.push_str(",\n"); + } + result.push_str(key.pretty_print().as_str()); result.push_str(": "); + result.push_str( + value + .pretty_print_with_indentation(true, indentation + 1) + .as_str(), + ); + } + + if !map.is_empty() { + if inline { + result.push(' '); + } else { + result.push('\n'); + } } - result.push_str(field_name.as_str()); + result.push('}'); + } + Self::Array(vec) => { + result.push('['); + let mut is_first = true; + for value in vec { + if is_first { + is_first = false; + } else { + result.push_str(", "); + } + result.push_str( + value + 
.pretty_print_with_indentation(true, indentation) + .as_str(), + ); + } + result.push(']'); + } + Self::Path(path) => { + let path = path.pretty_print_with_indentation(inline, indentation); + result.push_str(path.as_str()); + } + } - if let Some(sub) = sub { - let sub = sub.pretty_print_with_indentation(true, indentation); + result + } +} + +impl PrettyPrintable for NamedSelection { + fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { + let mut result = String::new(); + + if !inline { + result.push_str(indent_chars(indentation).as_str()); + } + + match self { + Self::Field(alias, field_key, sub) => { + if let Some(alias) = alias { + result.push_str(alias.pretty_print().as_str()); result.push(' '); - result.push_str(sub.as_str()); } - } - NamedSelection::Quoted(alias, literal, sub) => { - result.push_str(alias.name.as_str()); - result.push_str(": "); - let safely_quoted = - serde_json_bytes::Value::String(literal.clone().into()).to_string(); - result.push_str(safely_quoted.as_str()); + result.push_str(field_key.pretty_print().as_str()); if let Some(sub) = sub { let sub = sub.pretty_print_with_indentation(true, indentation); @@ -150,16 +293,24 @@ impl PrettyPrintable for NamedSelection { result.push_str(sub.as_str()); } } - NamedSelection::Path(alias, path) => { - result.push_str(alias.name.as_str()); - result.push_str(": "); - + Self::Path { + inline, + alias, + path, + } => { + if *inline { + result.push_str("... 
"); + } + if let Some(alias) = alias { + result.push_str(alias.pretty_print().as_str()); + result.push(' '); + } let path = path.pretty_print_with_indentation(true, indentation); result.push_str(path.trim_start()); } - NamedSelection::Group(alias, sub) => { - result.push_str(alias.name.as_str()); - result.push_str(": "); + Self::Group(alias, sub) => { + result.push_str(alias.pretty_print().as_str()); + result.push(' '); let sub = sub.pretty_print_with_indentation(true, indentation); result.push_str(sub.as_str()); @@ -170,7 +321,7 @@ impl PrettyPrintable for NamedSelection { } } -impl PrettyPrintable for StarSelection { +impl PrettyPrintable for Alias { fn pretty_print_with_indentation(&self, inline: bool, indentation: usize) -> String { let mut result = String::new(); @@ -178,29 +329,30 @@ impl PrettyPrintable for StarSelection { result.push_str(indent_chars(indentation).as_str()); } - if let Some(alias) = self.0.as_ref() { - result.push_str(alias.name.as_str()); - result.push_str(": "); - } + let name = self.name.pretty_print_with_indentation(true, indentation); + result.push_str(name.as_str()); + result.push(':'); - result.push('*'); + result + } +} - if let Some(sub) = self.1.as_ref() { - let sub = sub.pretty_print_with_indentation(true, indentation); - result.push(' '); - result.push_str(sub.as_str()); +impl PrettyPrintable for Key { + fn pretty_print_with_indentation(&self, _inline: bool, _indentation: usize) -> String { + match self { + Self::Field(name) => name.clone(), + Self::Quoted(name) => serde_json_bytes::Value::String(name.as_str().into()).to_string(), } - - result } } #[cfg(test)] mod tests { + use crate::sources::connect::json_selection::location::new_span; use crate::sources::connect::json_selection::pretty::indent_chars; use crate::sources::connect::json_selection::NamedSelection; use crate::sources::connect::json_selection::PrettyPrintable; - use crate::sources::connect::json_selection::StarSelection; + use crate::sources::connect::JSONSelection; 
use crate::sources::connect::PathSelection; use crate::sources::connect::SubSelection; @@ -221,7 +373,7 @@ mod tests { let prettified_inline = selection.pretty_print_with_indentation(true, indentation); assert_eq!( - prettified_inline, + prettified_inline.trim_start(), expected_indented.trim_start(), "pretty printing inline did not match: {prettified_inline} != {}", expected_indented.trim_start() @@ -234,22 +386,6 @@ mod tests { ); } - #[test] - fn it_prints_a_star_selection() { - let (unmatched, star_selection) = StarSelection::parse("rest: *").unwrap(); - assert!(unmatched.is_empty()); - - test_permutations(star_selection, "rest: *"); - } - - #[test] - fn it_prints_a_star_selection_with_subselection() { - let (unmatched, star_selection) = StarSelection::parse("rest: * { a b }").unwrap(); - assert!(unmatched.is_empty()); - - test_permutations(star_selection, "rest: * {\n a\n b\n}"); - } - #[test] fn it_prints_a_named_selection() { let selections = [ @@ -258,7 +394,7 @@ mod tests { "cool: beans", "cool: beans {\n whoa\n}", // Path - "cool: .one.two.three", + "cool: one.two.three", // Quoted r#"cool: "b e a n s""#, "cool: \"b e a n s\" {\n a\n b\n}", @@ -266,7 +402,7 @@ mod tests { "cool: {\n a\n b\n}", ]; for selection in selections { - let (unmatched, named_selection) = NamedSelection::parse(selection).unwrap(); + let (unmatched, named_selection) = NamedSelection::parse(new_span(selection)).unwrap(); assert!( unmatched.is_empty(), "static named selection was not fully parsed: '{selection}' ({named_selection:?}) had unmatched '{unmatched}'" @@ -282,14 +418,21 @@ mod tests { // Var "$.one.two.three", "$this.a.b", - "$id.first {\n username\n}", + "$this.id.first {\n username\n}", // Key - ".first", - ".a.b.c.d.e", - ".one.two.three {\n a\n b\n}", + "$.first", + "a.b.c.d.e", + "one.two.three {\n a\n b\n}", + "$.single {\n x\n}", + "results->slice($(-1)->mul($args.suffixLength))", + "$(1234)->add($(5678)->mul(2))", + "$(true)->and($(false)->not)", + 
"$(12345678987654321)->div(111111111)->eq(111111111)", + "$(\"Product\")->slice(0, $(4)->mul(-1))->eq(\"Pro\")", + "$($args.unnecessary.parens)->eq(42)", ]; for path in paths { - let (unmatched, path_selection) = PathSelection::parse(path).unwrap(); + let (unmatched, path_selection) = PathSelection::parse(new_span(path)).unwrap(); assert!( unmatched.is_empty(), "static path was not fully parsed: '{path}' ({path_selection:?}) had unmatched '{unmatched}'" @@ -302,7 +445,7 @@ mod tests { #[test] fn it_prints_a_sub_selection() { let sub = "{\n a\n b\n}"; - let (unmatched, sub_selection) = SubSelection::parse(sub).unwrap(); + let (unmatched, sub_selection) = SubSelection::parse(new_span(sub)).unwrap(); assert!( unmatched.is_empty(), "static path was not fully parsed: '{sub}' ({sub_selection:?}) had unmatched '{unmatched}'" @@ -323,7 +466,8 @@ mod tests { let sub_indented = "{\n a {\n b {\n c\n }\n }\n}"; let sub_super_indented = " {\n a {\n b {\n c\n }\n }\n }"; - let (unmatched, sub_selection) = SubSelection::parse(sub).unwrap(); + let (unmatched, sub_selection) = SubSelection::parse(new_span(sub)).unwrap(); + assert!( unmatched.is_empty(), "static nested sub was not fully parsed: '{sub}' ({sub_selection:?}) had unmatched '{unmatched}'" @@ -349,4 +493,10 @@ mod tests { "nested inline sub pretty printing did not match: {pretty} != {sub_super_indented}", ); } + + #[test] + fn it_prints_root_selection() { + let root_selection = JSONSelection::parse("id name").unwrap(); + test_permutations(root_selection, "id\nname"); + } } diff --git a/apollo-federation/src/sources/connect/json_selection/selection_set.rs b/apollo-federation/src/sources/connect/json_selection/selection_set.rs new file mode 100644 index 0000000000..5721e704c8 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/selection_set.rs @@ -0,0 +1,668 @@ +//! Functions for applying a [`SelectionSet`] to a [`JSONSelection`]. This creates a new +//! 
`JSONSelection` mapping to the fields on the selection set, and excluding parts of the +//! original `JSONSelection` that are not needed by the selection set. + +#![cfg_attr( + not(test), + deny( + clippy::exit, + clippy::panic, + clippy::unwrap_used, + clippy::expect_used, + clippy::indexing_slicing, + clippy::unimplemented, + clippy::todo, + missing_docs + ) +)] + +use apollo_compiler::executable::Field; +use apollo_compiler::executable::Selection; +use apollo_compiler::executable::SelectionSet; +use apollo_compiler::ExecutableDocument; +use apollo_compiler::Node; +use multimap::MultiMap; + +use super::known_var::KnownVariable; +use super::lit_expr::LitExpr; +use super::location::Ranged; +use super::location::WithRange; +use super::parser::MethodArgs; +use super::parser::PathList; +use crate::sources::connect::json_selection::Alias; +use crate::sources::connect::json_selection::NamedSelection; +use crate::sources::connect::JSONSelection; +use crate::sources::connect::PathSelection; +use crate::sources::connect::SubSelection; + +impl JSONSelection { + /// Apply a selection set to create a new [`JSONSelection`] + pub fn apply_selection_set( + &self, + document: &ExecutableDocument, + selection_set: &SelectionSet, + ) -> Self { + match self { + Self::Named(sub) => Self::Named(sub.apply_selection_set(document, selection_set)), + Self::Path(path) => Self::Path(path.apply_selection_set(document, selection_set)), + } + } +} + +impl SubSelection { + /// Apply a selection set to create a new [`SubSelection`] + pub fn apply_selection_set( + &self, + document: &ExecutableDocument, + selection_set: &SelectionSet, + ) -> Self { + let mut new_selections = Vec::new(); + let field_map = map_fields_by_name(document, selection_set); + + // When the operation contains __typename, it might be used to complete + // an entity reference (e.g. `__typename id`) for a subsequent fetch. 
+ // This encodes the typename selection as `__typename: $->echo("Product")` + // + // TODO: this must change before we support interfaces and unions + // because it will emit the abstract type's name which is invalid. + if field_map.contains_key("__typename") { + new_selections.push(NamedSelection::Path { + alias: Some(Alias::new("__typename")), + path: PathSelection { + path: WithRange::new( + PathList::Var( + WithRange::new(KnownVariable::Dollar, None), + WithRange::new( + PathList::Method( + WithRange::new("echo".to_string(), None), + Some(MethodArgs { + args: vec![WithRange::new( + LitExpr::String(selection_set.ty.to_string()), + None, + )], + ..Default::default() + }), + WithRange::new(PathList::Empty, None), + ), + None, + ), + ), + None, + ), + }, + inline: false, + }); + } + + for selection in &self.selections { + match selection { + NamedSelection::Field(alias, name, sub) => { + let key = alias + .as_ref() + .map(|a| a.name.as_str()) + .unwrap_or(name.as_str()); + if let Some(fields) = field_map.get_vec(key) { + for field in fields { + let field_response_key = field.response_key().as_str(); + new_selections.push(NamedSelection::Field( + if field_response_key == name.as_str() { + None + } else { + Some(Alias::new(field_response_key)) + }, + name.clone(), + sub.as_ref().map(|sub| { + sub.apply_selection_set(document, &field.selection_set) + }), + )); + } + } + } + NamedSelection::Path { + alias, + path: path_selection, + inline, + } => { + let inline = *inline; + if let Some(key) = alias.as_ref().map(|a| a.name.as_str()) { + // If the NamedSelection::Path has an alias (meaning + // it's a NamedPathSelection according to the grammar), + // use the alias name to look up the corresponding + // fields in the selection set. 
+ if let Some(fields) = field_map.get_vec(key) { + for field in fields { + new_selections.push(NamedSelection::Path { + alias: Some(Alias::new(field.response_key().as_str())), + path: path_selection + .apply_selection_set(document, &field.selection_set), + inline, + }); + } + } + } else { + // If the NamedSelection::Path has no alias (meaning + // it's a PathWithSubSelection according to the + // grammar), apply the selection set to the path and add + // the new PathWithSubSelection to the new_selections. + new_selections.push(NamedSelection::Path { + alias: None, + path: path_selection.apply_selection_set(document, selection_set), + inline, + }); + } + } + NamedSelection::Group(alias, sub) => { + let key = alias.name.as_str(); + if let Some(fields) = field_map.get_vec(key) { + for field in fields { + new_selections.push(NamedSelection::Group( + Alias::new(field.response_key().as_str()), + sub.apply_selection_set(document, &field.selection_set), + )); + } + } + } + } + } + + Self { + selections: new_selections, + // Keep the old range even though it may be inaccurate after the + // removal of selections, since it still indicates where the + // original SubSelection came from. 
+ range: self.range.clone(), + } + } +} + +impl PathSelection { + /// Apply a selection set to create a new [`PathSelection`] + pub fn apply_selection_set( + &self, + document: &ExecutableDocument, + selection_set: &SelectionSet, + ) -> Self { + Self { + path: WithRange::new( + self.path.apply_selection_set(document, selection_set), + self.path.range(), + ), + } + } +} + +impl PathList { + pub(crate) fn apply_selection_set( + &self, + document: &ExecutableDocument, + selection_set: &SelectionSet, + ) -> Self { + match self { + Self::Var(name, path) => Self::Var( + name.clone(), + WithRange::new( + path.apply_selection_set(document, selection_set), + path.range(), + ), + ), + Self::Key(key, path) => Self::Key( + key.clone(), + WithRange::new( + path.apply_selection_set(document, selection_set), + path.range(), + ), + ), + Self::Expr(expr, path) => Self::Expr( + expr.clone(), + WithRange::new( + path.apply_selection_set(document, selection_set), + path.range(), + ), + ), + Self::Method(method_name, args, path) => Self::Method( + method_name.clone(), + args.clone(), + WithRange::new( + path.apply_selection_set(document, selection_set), + path.range(), + ), + ), + Self::Selection(sub) => { + Self::Selection(sub.apply_selection_set(document, selection_set)) + } + Self::Empty => Self::Empty, + } + } +} + +fn map_fields_by_name<'a>( + document: &'a ExecutableDocument, + set: &'a SelectionSet, +) -> MultiMap> { + let mut map = MultiMap::new(); + map_fields_by_name_impl(document, set, &mut map); + map +} + +fn map_fields_by_name_impl<'a>( + document: &'a ExecutableDocument, + set: &'a SelectionSet, + map: &mut MultiMap>, +) { + for selection in &set.selections { + match selection { + Selection::Field(field) => { + map.insert(field.name.to_string(), field); + } + Selection::FragmentSpread(f) => { + if let Some(fragment) = f.fragment_def(document) { + map_fields_by_name_impl(document, &fragment.selection_set, map); + } + } + Selection::InlineFragment(fragment) => { + 
map_fields_by_name_impl(document, &fragment.selection_set, map); + } + } + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::executable::SelectionSet; + use apollo_compiler::validation::Valid; + use apollo_compiler::ExecutableDocument; + use apollo_compiler::Schema; + use pretty_assertions::assert_eq; + + fn selection_set(schema: &Valid, s: &str) -> (ExecutableDocument, SelectionSet) { + let document = ExecutableDocument::parse_and_validate(schema, s, "./").unwrap(); + let selection_set = document + .operations + .anonymous + .as_ref() + .unwrap() + .selection_set + .fields() + .next() + .unwrap() + .selection_set + .clone(); + (document.into_inner(), selection_set) + } + + #[test] + fn test() { + let json = super::JSONSelection::parse( + r###" + $.result { + a + b: c + d: e.f + g + h: 'i-j' + k: { l m: n } + } + "###, + ) + .unwrap(); + + let schema = Schema::parse_and_validate( + r###" + type Query { + t: T + } + + type T { + a: String + b: String + d: String + g: String + h: String + k: K + } + + type K { + l: String + m: String + } + "###, + "./", + ) + .unwrap(); + + let (document, selection_set) = selection_set( + &schema, + "{ t { z: a, y: b, x: d, w: h v: k { u: l t: m } } }", + ); + + let transformed = json.apply_selection_set(&document, &selection_set); + assert_eq!( + transformed.to_string(), + r###"$.result { + z: a + y: c + x: e.f + w: "i-j" + v: { + u: l + t: n + } +}"### + ); + } + + #[test] + fn test_star() { + let json_selection = super::JSONSelection::parse( + r###" + $.result { + a + b_alias: b + c { + d + e_alias: e + h: "h" + i: "i" + group: { + j + k + } + } + path_to_f: c.f + } + "###, + ) + .unwrap(); + + let schema = Schema::parse_and_validate( + r###" + type Query { + t: T + } + + type T { + a: String + b_alias: String + c: C + path_to_f: String + } + + type C { + d: String + e_alias: String + h: String + i: String + group: Group + } + + type Group { + j: String + k: String + } + "###, + "./", + ) + .unwrap(); + + let (document, 
selection_set) = selection_set( + &schema, + "{ t { a b_alias c { e: e_alias h group { j } } path_to_f } }", + ); + + let transformed = json_selection.apply_selection_set(&document, &selection_set); + assert_eq!( + transformed.to_string(), + r###"$.result { + a + b_alias: b + c { + e + "h" + group: { + j + } + } + path_to_f: c.f +}"### + ); + + let data = serde_json_bytes::json!({ + "result": { + "a": "a", + "b": "b", + "c": { + "d": "d", + "e": "e", + "f": "f", + "g": "g", + "h": "h", + "i": "i", + "j": "j", + "k": "k", + }, + } + }); + let result = transformed.apply_to(&data); + assert_eq!( + result, + ( + Some(serde_json_bytes::json!( + { + "a": "a", + "b_alias": "b", + "c": { + "e": "e", + "h": "h", + "group": { + "j": "j" + }, + }, + "path_to_f": "f", + })), + vec![] + ) + ); + } + + #[test] + fn test_depth() { + let json = super::JSONSelection::parse( + r###" + $.result { + a { + b { + renamed: c + } + } + } + "###, + ) + .unwrap(); + + let schema = Schema::parse_and_validate( + r###" + type Query { + t: T + } + + type T { + a: A + } + + type A { + b: B + } + + type B { + renamed: String + } + "###, + "./", + ) + .unwrap(); + + let (document, selection_set) = selection_set(&schema, "{ t { a { b { renamed } } } }"); + + let transformed = json.apply_selection_set(&document, &selection_set); + assert_eq!( + transformed.to_string(), + r###"$.result { + a { + b { + renamed: c + } + } +}"### + ); + + let data = serde_json_bytes::json!({ + "result": { + "a": { + "b": { + "c": "c", + } + } + } + } + ); + let result = transformed.apply_to(&data); + assert_eq!( + result, + ( + Some(serde_json_bytes::json!({"a": { "b": { "renamed": "c" } } } )), + vec![] + ) + ); + } + + #[test] + fn test_typename() { + let json = super::JSONSelection::parse( + r###" + $.result { + id + author: { + id: authorId + } + } + "###, + ) + .unwrap(); + + let schema = Schema::parse_and_validate( + r###" + type Query { + t: T + } + + type T { + id: ID + author: A + } + + type A { + id: ID + } + 
"###, + "./", + ) + .unwrap(); + + let (document, selection_set) = + selection_set(&schema, "{ t { id __typename author { __typename id } } }"); + + let transformed = json.apply_selection_set(&document, &selection_set); + assert_eq!( + transformed.to_string(), + r###"$.result { + __typename: $->echo("T") + id + author: { + __typename: $->echo("A") + id: authorId + } +}"### + ); + } + + #[test] + fn test_fragments() { + let json = super::JSONSelection::parse( + r###" + reviews: result { + id + product: { upc: product_upc } + author: { id: author_id } + } + "###, + ) + .unwrap(); + + let schema = Schema::parse_and_validate( + r###" + type Query { + _entities(representations: [_Any!]!): [_Entity] + } + + scalar _Any + + union _Entity = Product + + type Product { + upc: String + reviews: [Review] + } + + type Review { + id: ID + product: Product + author: User + } + + type User { + id: ID + } + "###, + "./", + ) + .unwrap(); + + let (document, selection_set) = selection_set( + &schema, + "query ($representations: [_Any!]!) 
{ + _entities(representations: $representations) { + ..._generated_onProduct1_0 + } + } + fragment _generated_onProduct1_0 on Product { + reviews { + id + product { + __typename + upc + } + author { + __typename + id + } + } + }", + ); + + let transformed = json.apply_selection_set(&document, &selection_set); + assert_eq!( + transformed.to_string(), + r###"reviews: result { + id + product: { + __typename: $->echo("Product") + upc: product_upc + } + author: { + __typename: $->echo("User") + id: author_id + } +}"### + ); + } +} diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__arrow_path_ranges.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__arrow_path_ranges.snap new file mode 100644 index 0000000000..961cd69f44 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__arrow_path_ranges.snap @@ -0,0 +1,82 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/location.rs +expression: parsed +--- +Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: Some( + 2..12, + ), + }, + range: Some( + 2..13, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: @, + range: Some( + 14..15, + ), + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: Some( + 19..23, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "Frog", + ), + range: Some( + 26..32, + ), + }, + ], + range: Some( + 24..36, + ), + }, + ), + WithRange { + node: Empty, + range: Some( + 36..36, + ), + }, + ), + range: Some( + 16..36, + ), + }, + ), + range: Some( + 14..36, + ), + }, + }, + }, + ], + range: Some( + 2..36, + ), + }, +) diff 
--git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__parse_with_range_snapshots.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__parse_with_range_snapshots.snap new file mode 100644 index 0000000000..5b988af8b5 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__location__tests__parse_with_range_snapshots.snap @@ -0,0 +1,468 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/location.rs +expression: "format!(\"{:#?}\", parsed)" +--- +Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "path", + ), + range: Some( + 9..13, + ), + }, + range: Some( + 9..14, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "some", + ), + range: Some( + 15..19, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "nested", + ), + range: Some( + 20..26, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "path", + ), + range: Some( + 27..31, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "isbn", + ), + range: Some( + 34..38, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "author", + ), + range: Some( + 39..45, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 48..52, + ), + }, + None, + ), + ], + range: Some( + 46..54, + ), + }, + ), + ), + ], + range: Some( + 32..55, + ), + }, + ), + range: Some( + 32..55, + ), + }, + ), + range: Some( + 26..55, + ), + }, + ), + range: Some( + 19..55, + ), + }, + ), + range: Some( + 15..55, + ), + }, + }, + }, + Field( + Some( + Alias { + 
name: WithRange { + node: Field( + "alias", + ), + range: Some( + 64..69, + ), + }, + range: Some( + 64..70, + ), + }, + ), + WithRange { + node: Quoted( + "not an identifier", + ), + range: Some( + 71..90, + ), + }, + Some( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: Some( + 151..161, + ), + }, + range: Some( + 151..162, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: @, + range: Some( + 163..164, + ), + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: Some( + 166..170, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "Frog", + ), + range: Some( + 172..178, + ), + }, + ], + range: Some( + 170..182, + ), + }, + ), + WithRange { + node: Empty, + range: Some( + 182..182, + ), + }, + ), + range: Some( + 164..182, + ), + }, + ), + range: Some( + 163..182, + ), + }, + }, + }, + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "wrapped", + ), + range: Some( + 195..202, + ), + }, + range: Some( + 195..203, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 204..205, + ), + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: Some( + 207..211, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Object( + { + WithRange { + node: Field( + "wrapped", + ), + range: Some( + 214..221, + ), + }: WithRange { + node: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: @, + range: Some( + 224..225, + ), + }, + WithRange { + node: Empty, + range: Some( + 225..225, + ), + }, + ), + range: Some( + 224..225, + ), + }, + }, + ), + range: Some( + 224..225, + ), + }, + }, + ), + range: Some( + 212..229, + ), + }, + ], + range: Some( + 211..230, + ), + }, + ), + WithRange { + node: Empty, + range: Some( + 230..230, + ), + }, + ), + range: 
Some( + 205..230, + ), + }, + ), + range: Some( + 204..230, + ), + }, + }, + }, + Group( + Alias { + name: WithRange { + node: Field( + "group", + ), + range: Some( + 243..248, + ), + }, + range: Some( + 243..249, + ), + }, + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "a", + ), + range: Some( + 252..253, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "b", + ), + range: Some( + 254..255, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "c", + ), + range: Some( + 256..257, + ), + }, + None, + ), + ], + range: Some( + 250..259, + ), + }, + ), + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "arg", + ), + range: Some( + 272..275, + ), + }, + range: Some( + 272..276, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 277..282, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "arg", + ), + range: Some( + 285..288, + ), + }, + WithRange { + node: Empty, + range: Some( + 288..288, + ), + }, + ), + range: Some( + 283..288, + ), + }, + ), + range: Some( + 277..288, + ), + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 301..306, + ), + }, + None, + ), + ], + range: Some( + 91..316, + ), + }, + ), + ), + ], + range: Some( + 9..316, + ), + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-2.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-2.snap new file mode 100644 index 0000000000..31ee30f07f --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-2.snap @@ -0,0 +1,11 @@ +--- +source: 
apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(\"$bogus\")" +--- +Err( + JSONSelectionParseError { + message: "Unknown variable", + fragment: "$bogus", + offset: 0, + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-3.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-3.snap new file mode 100644 index 0000000000..ca851bb285 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots-3.snap @@ -0,0 +1,11 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(\"id $.object\")" +--- +Err( + JSONSelectionParseError { + message: "Named path selection must either begin with alias or ..., or end with subselection", + fragment: "$.object", + offset: 3, + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots.snap new file mode 100644 index 0000000000..a3d6191722 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__error_snapshots.snap @@ -0,0 +1,11 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(\".data\")" +--- +Err( + JSONSelectionParseError { + message: "Key paths cannot start with just .key (use $.key instead)", + fragment: ".data", + offset: 0, + }, +) diff --git 
a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__expr_path_selections.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__expr_path_selections.snap new file mode 100644 index 0000000000..96055c9783 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__expr_path_selections.snap @@ -0,0 +1,171 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "selection!(\" suffix : results -> slice ( $( - 1 ) -> mul ( $args . suffixLength ) ) \")" +--- +Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "suffix", + ), + range: Some( + 1..7, + ), + }, + range: Some( + 1..9, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "results", + ), + range: Some( + 10..17, + ), + }, + WithRange { + node: Method( + WithRange { + node: "slice", + range: Some( + 21..26, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Path( + PathSelection { + path: WithRange { + node: Expr( + WithRange { + node: Number( + Number(-1), + ), + range: Some( + 32..35, + ), + }, + WithRange { + node: Method( + WithRange { + node: "mul", + range: Some( + 41..44, + ), + }, + Some( + MethodArgs { + args: [ + WithRange { + node: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 47..52, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "suffixLength", + ), + range: Some( + 55..67, + ), + }, + WithRange { + node: Empty, + range: Some( + 67..67, + ), + }, + ), + range: Some( + 53..67, + ), + }, + ), + range: Some( + 47..67, + ), + }, + }, + ), + range: Some( + 47..67, + ), + }, + ], + range: Some( + 45..69, 
+ ), + }, + ), + WithRange { + node: Empty, + range: Some( + 69..69, + ), + }, + ), + range: Some( + 38..69, + ), + }, + ), + range: Some( + 29..69, + ), + }, + }, + ), + range: Some( + 29..69, + ), + }, + ], + range: Some( + 27..71, + ), + }, + ), + WithRange { + node: Empty, + range: Some( + 71..71, + ), + }, + ), + range: Some( + 18..71, + ), + }, + ), + range: Some( + 10..71, + ), + }, + }, + }, + ], + range: Some( + 1..71, + ), + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-2.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-2.snap new file mode 100644 index 0000000000..ab32022f05 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-2.snap @@ -0,0 +1,137 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "selection!(r#\"\n id\n created\n choices->first.message { content role }\n model\n \"#)" +--- +Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 13..15, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "created", + ), + range: Some( + 28..35, + ), + }, + None, + ), + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "choices", + ), + range: Some( + 48..55, + ), + }, + WithRange { + node: Method( + WithRange { + node: "first", + range: Some( + 57..62, + ), + }, + None, + WithRange { + node: Key( + WithRange { + node: Field( + "message", + ), + range: Some( + 63..70, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "content", + ), + 
range: Some( + 73..80, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "role", + ), + range: Some( + 81..85, + ), + }, + None, + ), + ], + range: Some( + 71..87, + ), + }, + ), + range: Some( + 71..87, + ), + }, + ), + range: Some( + 62..87, + ), + }, + ), + range: Some( + 55..87, + ), + }, + ), + range: Some( + 48..87, + ), + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "model", + ), + range: Some( + 100..105, + ), + }, + None, + ), + ], + range: Some( + 13..105, + ), + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-3.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-3.snap new file mode 100644 index 0000000000..13fd8b2347 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-3.snap @@ -0,0 +1,227 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "selection!(r#\"\n id\n created\n choices->first.message { content role }\n model\n choices->last.message { lastContent: content }\n \"#)" +--- +Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 13..15, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "created", + ), + range: Some( + 28..35, + ), + }, + None, + ), + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "choices", + ), + range: Some( + 48..55, + ), + }, + WithRange { + node: Method( + WithRange { + node: "first", + range: Some( + 57..62, + ), + }, + None, + WithRange { + node: Key( + WithRange { + node: Field( + "message", + ), + range: Some( + 63..70, + ), + }, + 
WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "content", + ), + range: Some( + 73..80, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "role", + ), + range: Some( + 81..85, + ), + }, + None, + ), + ], + range: Some( + 71..87, + ), + }, + ), + range: Some( + 71..87, + ), + }, + ), + range: Some( + 62..87, + ), + }, + ), + range: Some( + 55..87, + ), + }, + ), + range: Some( + 48..87, + ), + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "model", + ), + range: Some( + 100..105, + ), + }, + None, + ), + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "choices", + ), + range: Some( + 118..125, + ), + }, + WithRange { + node: Method( + WithRange { + node: "last", + range: Some( + 127..131, + ), + }, + None, + WithRange { + node: Key( + WithRange { + node: Field( + "message", + ), + range: Some( + 132..139, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "lastContent", + ), + range: Some( + 142..153, + ), + }, + range: Some( + 142..154, + ), + }, + ), + WithRange { + node: Field( + "content", + ), + range: Some( + 155..162, + ), + }, + None, + ), + ], + range: Some( + 140..164, + ), + }, + ), + range: Some( + 140..164, + ), + }, + ), + range: Some( + 131..164, + ), + }, + ), + range: Some( + 125..164, + ), + }, + ), + range: Some( + 118..164, + ), + }, + }, + }, + ], + range: Some( + 13..164, + ), + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-4.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-4.snap new file mode 100644 index 0000000000..4c5931ac23 --- /dev/null +++ 
b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-4.snap @@ -0,0 +1,11 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(r#\"\n id\n created\n choices->first.message\n model\n \"#)" +--- +Err( + JSONSelectionParseError { + message: "Named path selection must either begin with alias or ..., or end with subselection", + fragment: "choices->first.message\n model\n ", + offset: 48, + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-5.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-5.snap new file mode 100644 index 0000000000..8cce06e58f --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-5.snap @@ -0,0 +1,141 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(r#\"\n id: $this.id\n $args.input {\n title\n body\n }\n \"#)" +--- +Ok( + Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "id", + ), + range: Some( + 13..15, + ), + }, + range: Some( + 13..16, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 17..22, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 23..25, + ), + }, + WithRange { + node: Empty, + range: Some( + 25..25, + ), + }, + ), + range: Some( + 22..25, + ), + }, + ), + range: Some( + 17..25, + ), + }, + }, + }, + Path { + alias: None, + inline: true, + path: PathSelection { + path: 
WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 38..43, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "input", + ), + range: Some( + 44..49, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "title", + ), + range: Some( + 68..73, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "body", + ), + range: Some( + 90..94, + ), + }, + None, + ), + ], + range: Some( + 50..108, + ), + }, + ), + range: Some( + 50..108, + ), + }, + ), + range: Some( + 43..108, + ), + }, + ), + range: Some( + 38..108, + ), + }, + }, + }, + ], + range: Some( + 13..108, + ), + }, + ), +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-6.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-6.snap new file mode 100644 index 0000000000..51d51ece4f --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-6.snap @@ -0,0 +1,165 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(r#\"\n $this { id }\n $args { $.input { title body } }\n \"#)" +--- +Ok( + Named( + SubSelection { + selections: [ + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 13..18, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 21..23, + ), + }, + None, + ), + ], + range: Some( + 19..25, + ), + }, + ), + range: Some( + 19..25, + ), + }, + ), + range: Some( + 13..25, + ), + }, + }, + }, + Path { + alias: 
None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 38..43, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 46..47, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "input", + ), + range: Some( + 48..53, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "title", + ), + range: Some( + 56..61, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "body", + ), + range: Some( + 62..66, + ), + }, + None, + ), + ], + range: Some( + 54..68, + ), + }, + ), + range: Some( + 54..68, + ), + }, + ), + range: Some( + 47..68, + ), + }, + ), + range: Some( + 46..68, + ), + }, + }, + }, + ], + range: Some( + 44..70, + ), + }, + ), + range: Some( + 44..70, + ), + }, + ), + range: Some( + 38..70, + ), + }, + }, + }, + ], + range: Some( + 13..70, + ), + }, + ), +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-7.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-7.snap new file mode 100644 index 0000000000..6b24a8bee5 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection-7.snap @@ -0,0 +1,272 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "JSONSelection::parse(r#\"\n # Equivalent to id: $this.id\n $this { id }\n\n $args {\n __typename: $(\"Args\")\n\n # Using $. instead of just . 
prevents .input from\n # parsing as a key applied to the $(\"Args\") string.\n $.input { title body }\n\n extra\n }\n\n from: $.from\n \"#)" +--- +Ok( + Named( + SubSelection { + selections: [ + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 54..59, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 62..64, + ), + }, + None, + ), + ], + range: Some( + 60..66, + ), + }, + ), + range: Some( + 60..66, + ), + }, + ), + range: Some( + 54..66, + ), + }, + }, + }, + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 80..85, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: Some( + 104..114, + ), + }, + range: Some( + 104..115, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Expr( + WithRange { + node: String( + "Args", + ), + range: Some( + 118..124, + ), + }, + WithRange { + node: Empty, + range: Some( + 125..125, + ), + }, + ), + range: Some( + 116..125, + ), + }, + }, + }, + Path { + alias: None, + inline: true, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 277..278, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "input", + ), + range: Some( + 279..284, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "title", + ), + range: Some( + 287..292, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "body", + ), + range: Some( + 293..297, + ), + }, + None, + ), + ], + range: Some( + 285..299, + ), + }, + ), + range: Some( + 285..299, + ), + }, + ), + range: Some( + 
278..299, + ), + }, + ), + range: Some( + 277..299, + ), + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "extra", + ), + range: Some( + 317..322, + ), + }, + None, + ), + ], + range: Some( + 86..336, + ), + }, + ), + range: Some( + 86..336, + ), + }, + ), + range: Some( + 80..336, + ), + }, + }, + }, + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "from", + ), + range: Some( + 350..354, + ), + }, + range: Some( + 350..355, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 356..357, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "from", + ), + range: Some( + 358..362, + ), + }, + WithRange { + node: Empty, + range: Some( + 362..362, + ), + }, + ), + range: Some( + 357..362, + ), + }, + ), + range: Some( + 356..362, + ), + }, + }, + }, + ], + range: Some( + 54..362, + ), + }, + ), +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection.snap new file mode 100644 index 0000000000..c48cdec378 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__path_with_subselection.snap @@ -0,0 +1,90 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "selection!(r#\"\n choices->first.message { content role }\n \"#)" +--- +Path( + PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "choices", + ), + range: Some( + 13..20, + ), + }, + WithRange { + node: Method( + WithRange { + node: "first", + range: Some( + 22..27, + ), + }, + None, + WithRange { + node: Key( + WithRange { + node: Field( + "message", + ), + 
range: Some( + 28..35, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "content", + ), + range: Some( + 38..45, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "role", + ), + range: Some( + 46..50, + ), + }, + None, + ), + ], + range: Some( + 36..52, + ), + }, + ), + range: Some( + 36..52, + ), + }, + ), + range: Some( + 27..52, + ), + }, + ), + range: Some( + 20..52, + ), + }, + ), + range: Some( + 13..52, + ), + }, + }, +) diff --git a/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__selection.snap b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__selection.snap new file mode 100644 index 0000000000..f7968d8649 --- /dev/null +++ b/apollo-federation/src/sources/connect/json_selection/snapshots/apollo_federation__sources__connect__json_selection__parser__tests__selection.snap @@ -0,0 +1,286 @@ +--- +source: apollo-federation/src/sources/connect/json_selection/parser.rs +expression: "selection!(\"\n # Comments are supported because we parse them as whitespace\n topLevelAlias: topLevelField {\n identifier: 'property name with spaces'\n 'unaliased non-identifier property'\n 'non-identifier alias': identifier\n\n # This extracts the value located at the given path and applies a\n # selection set to it before renaming the result to pathSelection\n pathSelection: some.nested.path {\n still: yet\n more\n properties\n }\n\n # An aliased SubSelection of fields nests the fields together\n # under the given alias\n siblingGroup: { brother sister }\n }\")" +--- +Named( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "topLevelAlias", + ), + range: Some( + 86..99, + ), + }, + range: Some( + 86..100, + ), + }, + ), + WithRange { + node: Field( + "topLevelField", + ), + 
range: Some( + 101..114, + ), + }, + Some( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "identifier", + ), + range: Some( + 133..143, + ), + }, + range: Some( + 133..144, + ), + }, + ), + WithRange { + node: Quoted( + "property name with spaces", + ), + range: Some( + 145..172, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Quoted( + "unaliased non-identifier property", + ), + range: Some( + 189..224, + ), + }, + None, + ), + Field( + Some( + Alias { + name: WithRange { + node: Quoted( + "non-identifier alias", + ), + range: Some( + 241..263, + ), + }, + range: Some( + 241..264, + ), + }, + ), + WithRange { + node: Field( + "identifier", + ), + range: Some( + 265..275, + ), + }, + None, + ), + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "pathSelection", + ), + range: Some( + 457..470, + ), + }, + range: Some( + 457..471, + ), + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Key( + WithRange { + node: Field( + "some", + ), + range: Some( + 472..476, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "nested", + ), + range: Some( + 477..483, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "path", + ), + range: Some( + 484..488, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "still", + ), + range: Some( + 511..516, + ), + }, + range: Some( + 511..517, + ), + }, + ), + WithRange { + node: Field( + "yet", + ), + range: Some( + 518..521, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "more", + ), + range: Some( + 542..546, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "properties", + ), + range: Some( + 567..577, + ), + }, + None, + ), + ], + range: Some( + 489..595, + ), + }, + ), + range: Some( + 489..595, + ), + }, + ), + range: Some( + 483..595, + ), + }, + ), + range: 
Some( + 476..595, + ), + }, + ), + range: Some( + 472..595, + ), + }, + }, + }, + Group( + Alias { + name: WithRange { + node: Field( + "siblingGroup", + ), + range: Some( + 731..743, + ), + }, + range: Some( + 731..744, + ), + }, + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "brother", + ), + range: Some( + 747..754, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "sister", + ), + range: Some( + 755..761, + ), + }, + None, + ), + ], + range: Some( + 745..763, + ), + }, + ), + ], + range: Some( + 115..777, + ), + }, + ), + ), + ], + range: Some( + 86..777, + ), + }, +) diff --git a/apollo-federation/src/sources/connect/mod.rs b/apollo-federation/src/sources/connect/mod.rs index 80a48a74a8..b7f714b109 100644 --- a/apollo-federation/src/sources/connect/mod.rs +++ b/apollo-federation/src/sources/connect/mod.rs @@ -1,12 +1,124 @@ -#![allow(unused_imports)] +use std::fmt::Display; +use std::hash::Hash; +use std::hash::Hasher; +use apollo_compiler::Name; + +pub mod expand; +mod header; mod json_selection; -mod url_path_template; +mod models; +pub(crate) mod spec; +mod string_template; +mod url_template; +pub mod validation; +pub(crate) mod variable; -pub use json_selection::ApplyTo; +use apollo_compiler::name; pub use json_selection::ApplyToError; pub use json_selection::JSONSelection; pub use json_selection::Key; pub use json_selection::PathSelection; pub use json_selection::SubSelection; -pub use url_path_template::URLPathTemplate; +pub use models::CustomConfiguration; +pub use spec::ConnectSpec; +pub use url_template::URLTemplate; +pub use variable::Namespace; + +pub use self::models::Connector; +pub use self::models::EntityResolver; +pub use self::models::HTTPMethod; +pub use self::models::HeaderSource; +pub use self::models::HttpJsonTransport; +use crate::schema::position::ObjectFieldDefinitionPosition; +use crate::schema::position::ObjectOrInterfaceFieldDefinitionPosition; +use 
crate::schema::position::ObjectOrInterfaceFieldDirectivePosition; + +#[derive(Debug, Clone)] +pub struct ConnectId { + pub label: String, + pub subgraph_name: String, + pub source_name: Option, + pub(crate) directive: ObjectOrInterfaceFieldDirectivePosition, +} + +impl ConnectId { + /// Create a synthetic name for this connect ID + /// + /// Until we have a source-aware query planner, we'll need to split up connectors into + /// their own subgraphs when doing planning. Each subgraph will need a name, so we + /// synthesize one using metadata present on the directive. + pub(crate) fn synthetic_name(&self) -> String { + format!( + "{}_{}_{}_{}", + self.subgraph_name, + self.directive.field.type_name(), + self.directive.field.field_name(), + self.directive.directive_index + ) + } + + pub fn subgraph_source(&self) -> String { + let source = format!(".{}", self.source_name.as_deref().unwrap_or("")); + format!("{}{}", self.subgraph_name, source) + } + + pub fn coordinate(&self) -> String { + format!( + "{}:{}.{}@connect[{}]", + self.subgraph_name, + self.directive.field.type_name(), + self.directive.field.field_name(), + self.directive.directive_index + ) + } +} + +impl PartialEq for ConnectId { + fn eq(&self, other: &Self) -> bool { + self.subgraph_name == other.subgraph_name && self.directive == other.directive + } +} + +impl Eq for ConnectId {} + +impl Hash for ConnectId { + fn hash(&self, state: &mut H) { + self.subgraph_name.hash(state); + self.directive.hash(state); + } +} + +impl Display for ConnectId { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.label) + } +} + +impl ConnectId { + /// Mostly intended for tests in apollo-router + pub fn new( + subgraph_name: String, + source_name: Option, + type_name: Name, + field_name: Name, + index: usize, + label: &str, + ) -> Self { + Self { + label: label.to_string(), + subgraph_name, + source_name, + directive: ObjectOrInterfaceFieldDirectivePosition { + field: 
ObjectOrInterfaceFieldDefinitionPosition::Object( + ObjectFieldDefinitionPosition { + type_name, + field_name, + }, + ), + directive_name: name!(connect), + directive_index: index, + }, + } + } +} diff --git a/apollo-federation/src/sources/connect/models.rs b/apollo-federation/src/sources/connect/models.rs new file mode 100644 index 0000000000..9c99f2a443 --- /dev/null +++ b/apollo-federation/src/sources/connect/models.rs @@ -0,0 +1,827 @@ +mod keys; + +use std::collections::HashMap; +use std::error::Error; +use std::fmt::Display; +use std::fmt::Formatter; +use std::sync::Arc; + +use apollo_compiler::ast; +use apollo_compiler::collections::HashSet; +use apollo_compiler::collections::IndexMap; +use apollo_compiler::executable::FieldSet; +use apollo_compiler::parser::SourceSpan; +use apollo_compiler::validation::Valid; +use apollo_compiler::Name; +use apollo_compiler::Node; +use apollo_compiler::Schema; +use either::Either; +use http::header; +use http::HeaderName; +use keys::make_key_field_set_from_variables; +use serde_json::Value; +use url::Url; + +use super::json_selection::ExternalVarPaths; +use super::spec::schema::ConnectDirectiveArguments; +use super::spec::schema::SourceDirectiveArguments; +use super::spec::ConnectHTTPArguments; +use super::spec::SourceHTTPArguments; +use super::string_template; +use super::variable::Namespace; +use super::variable::VariableReference; +use super::ConnectId; +use super::JSONSelection; +use super::PathSelection; +use super::URLTemplate; +use crate::error::FederationError; +use crate::internal_error; +use crate::link::Link; +use crate::sources::connect::header::HeaderValue; +use crate::sources::connect::spec::extract_connect_directive_arguments; +use crate::sources::connect::spec::extract_source_directive_arguments; +use crate::sources::connect::spec::schema::HEADERS_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME; +use 
crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME; +use crate::sources::connect::ConnectSpec; + +// --- Connector --------------------------------------------------------------- + +#[derive(Debug, Clone)] +pub struct Connector { + pub id: ConnectId, + pub transport: HttpJsonTransport, + pub selection: JSONSelection, + pub config: Option, + pub max_requests: Option, + + /// The type of entity resolver to use for this connector + pub entity_resolver: Option, + /// Which version of the connect spec is this connector using? + pub spec: ConnectSpec, + + pub request_variables: HashSet, + pub response_variables: HashSet, +} + +pub type CustomConfiguration = Arc>; + +/// Entity resolver type +/// +/// A connector can be used as a potential entity resolver for a type, with +/// extra validation rules based on the transport args and field position within +/// a schema. +#[derive(Debug, Clone, PartialEq)] +pub enum EntityResolver { + /// The user defined a connector on a field that acts as an entity resolver + Explicit, + + /// The user defined a connector on a field of a type, so we need an entity resolver for that type + Implicit, +} + +impl Connector { + /// Get a map of connectors from an apollo_compiler::Schema. + /// + /// Note: the function assumes that we've checked that the schema is valid + /// before calling this function. We can't take a Valid or ValidFederationSchema + /// because we use this code in validation, which occurs before we've augmented + /// the schema with types from `@link` directives. 
+ pub(crate) fn from_schema( + schema: &Schema, + subgraph_name: &str, + spec: ConnectSpec, + ) -> Result, FederationError> { + let connect_identity = ConnectSpec::identity(); + let Some((link, _)) = Link::for_identity(schema, &connect_identity) else { + return Ok(Default::default()); + }; + + let source_name = ConnectSpec::source_directive_name(&link); + let source_arguments = extract_source_directive_arguments(schema, &source_name)?; + + let connect_name = ConnectSpec::connect_directive_name(&link); + let connect_arguments = extract_connect_directive_arguments(schema, &connect_name)?; + + connect_arguments + .into_iter() + .map(|args| Self::from_directives(schema, subgraph_name, spec, args, &source_arguments)) + .collect() + } + + fn from_directives( + schema: &Schema, + subgraph_name: &str, + spec: ConnectSpec, + connect: ConnectDirectiveArguments, + source_arguments: &[SourceDirectiveArguments], + ) -> Result<(ConnectId, Self), FederationError> { + let source = connect + .source + .as_ref() + .and_then(|name| source_arguments.iter().find(|s| s.name == *name)); + + let source_name = source.map(|s| s.name.clone()); + let connect_http = connect + .http + .ok_or_else(|| internal_error!("@connect(http:) missing"))?; + let source_http = source.map(|s| &s.http); + + let transport = HttpJsonTransport::from_directive(connect_http, source_http)?; + + let parent_type_name = connect.position.field.type_name().clone(); + let schema_def = &schema.schema_definition; + let on_query = schema_def + .query + .as_ref() + .map(|ty| ty.name == parent_type_name) + .unwrap_or(false); + let on_mutation = schema_def + .mutation + .as_ref() + .map(|ty| ty.name == parent_type_name) + .unwrap_or(false); + let on_root_type = on_query || on_mutation; + + let id = ConnectId { + label: make_label(subgraph_name, &source_name, &transport), + subgraph_name: subgraph_name.to_string(), + source_name: source_name.clone(), + directive: connect.position, + }; + + let entity_resolver = match 
(connect.entity, on_root_type) { + (true, _) => Some(EntityResolver::Explicit), + (_, false) => Some(EntityResolver::Implicit), + _ => None, + }; + + let request_variables = transport.variables().collect(); + let response_variables = connect.selection.external_variables().collect(); + + let connector = Connector { + id: id.clone(), + transport, + selection: connect.selection, + entity_resolver, + config: None, + max_requests: None, + spec, + request_variables, + response_variables, + }; + + Ok((id, connector)) + } + + pub fn field_name(&self) -> &Name { + self.id.directive.field.field_name() + } + + pub(crate) fn variable_references( + &self, + ) -> impl Iterator> + '_ { + self.transport.variable_references().chain( + self.selection + .external_var_paths() + .into_iter() + .flat_map(PathSelection::variable_reference), + ) + } + + /// Create a field set for a `@key` using $args and $this variables. + pub(crate) fn resolvable_key( + &self, + schema: &Schema, + ) -> Result>, FederationError> { + match &self.entity_resolver { + None => Ok(None), + Some(EntityResolver::Explicit) => { + let output_type = self + .id + .directive + .field + .get(schema) + .map(|f| f.ty.inner_named_type()) + .map_err(|_| { + internal_error!( + "Missing field {}.{}", + self.id.directive.field.type_name(), + self.id.directive.field.field_name() + ) + })?; + make_key_field_set_from_variables( + schema, + output_type, + self.variable_references(), + EntityResolver::Explicit, + ) + .map_err(|_| { + internal_error!("Failed to create key for connector {}", self.id.label) + }) + } + Some(EntityResolver::Implicit) => make_key_field_set_from_variables( + schema, + self.id.directive.field.type_name(), + self.variable_references(), + EntityResolver::Implicit, + ) + .map_err(|_| internal_error!("Failed to create key for connector {}", self.id.label)), + } + } +} + +fn make_label( + subgraph_name: &str, + source: &Option, + transport: &HttpJsonTransport, +) -> String { + let source = format!(".{}", 
source.as_deref().unwrap_or("")); + format!("{}{} {}", subgraph_name, source, transport.label()) +} + +// --- HTTP JSON --------------------------------------------------------------- +#[derive(Clone, Debug)] +pub struct HttpJsonTransport { + pub source_url: Option, + pub connect_template: URLTemplate, + pub method: HTTPMethod, + pub headers: IndexMap, + pub body: Option, +} + +impl HttpJsonTransport { + fn from_directive( + http: ConnectHTTPArguments, + source: Option<&SourceHTTPArguments>, + ) -> Result { + let (method, connect_url) = if let Some(url) = &http.get { + (HTTPMethod::Get, url) + } else if let Some(url) = &http.post { + (HTTPMethod::Post, url) + } else if let Some(url) = &http.patch { + (HTTPMethod::Patch, url) + } else if let Some(url) = &http.put { + (HTTPMethod::Put, url) + } else if let Some(url) = &http.delete { + (HTTPMethod::Delete, url) + } else { + return Err(FederationError::internal("missing http method")); + }; + + #[allow(clippy::mutable_key_type)] + // HeaderName is internally mutable, but we don't mutate it + let mut headers = http.headers; + for (header_name, header_source) in + source.map(|source| &source.headers).into_iter().flatten() + { + if !headers.contains_key(header_name) { + headers.insert(header_name.clone(), header_source.clone()); + } + } + + Ok(Self { + source_url: source.map(|s| s.base_url.clone()), + connect_template: connect_url.parse().map_err(|e: string_template::Error| { + FederationError::internal(format!( + "could not parse URL template: {message}", + message = e.message + )) + })?, + method, + headers, + body: http.body.clone(), + }) + } + + fn label(&self) -> String { + format!("http: {} {}", self.method.as_str(), self.connect_template) + } + + fn variables(&self) -> impl Iterator + '_ { + self.variable_references() + .map(|var_ref| var_ref.namespace.namespace) + } + + fn variable_references(&self) -> impl Iterator> + '_ { + let url_selections = self.connect_template.expressions().map(|e| &e.expression); + let 
header_selections = self + .headers + .iter() + .flat_map(|(_, source)| source.expressions()); + url_selections + .chain(header_selections) + .chain(self.body.iter()) + .flat_map(|b| { + b.external_var_paths() + .into_iter() + .flat_map(PathSelection::variable_reference) + }) + } +} + +/// The HTTP arguments needed for a connect request +#[derive(Debug, Clone, strum_macros::Display)] +pub enum HTTPMethod { + Get, + Post, + Patch, + Put, + Delete, +} + +impl HTTPMethod { + #[inline] + pub fn as_str(&self) -> &str { + match self { + HTTPMethod::Get => "GET", + HTTPMethod::Post => "POST", + HTTPMethod::Patch => "PATCH", + HTTPMethod::Put => "PUT", + HTTPMethod::Delete => "DELETE", + } + } +} + +#[derive(Clone, Debug)] +pub enum HeaderSource { + From(HeaderName), + Value(HeaderValue), +} + +impl HeaderSource { + pub(crate) fn expressions(&self) -> impl Iterator { + match self { + HeaderSource::From(_) => Either::Left(std::iter::empty()), + HeaderSource::Value(value) => Either::Right(value.expressions().map(|e| &e.expression)), + } + } +} + +#[derive(Clone, Debug)] +pub(crate) struct Header<'a> { + pub(crate) name: HeaderName, + pub(crate) name_node: &'a Node, + pub(crate) source: HeaderSource, + pub(crate) source_node: &'a Node, +} + +impl<'a> Header<'a> { + /// Get a list of headers from the `headers` argument in a `@connect` or `@source` directive. + pub(crate) fn from_headers_arg( + node: &'a Node, + ) -> Vec>> { + if let Some(values) = node.as_list() { + values.iter().map(Self::from_single).collect() + } else if node.as_object().is_some() { + vec![Self::from_single(node)] + } else { + vec![Err(HeaderParseError::Other { + message: format!("`{HEADERS_ARGUMENT_NAME}` must be an object or list of objects"), + node, + })] + } + } + + /// Build a single [`Self`] from a single entry in the `headers` arg. 
+ fn from_single(node: &'a Node) -> Result> { + let mappings = node.as_object().ok_or_else(|| HeaderParseError::Other { + message: "the HTTP header mapping is not an object".to_string(), + node, + })?; + let name_node = mappings + .iter() + .find_map(|(name, value)| { + (*name == HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME).then_some(value) + }) + .ok_or_else(|| HeaderParseError::Other { + message: format!("missing `{HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME}` field"), + node, + })?; + let name = name_node + .as_str() + .ok_or_else(|| format!("`{HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME}` is not a string")) + .and_then(|name_str| { + HeaderName::try_from(name_str) + .map_err(|_| format!("the value `{name_str}` is an invalid HTTP header name")) + }) + .map_err(|message| HeaderParseError::Other { + message, + node: name_node, + })?; + + if Self::is_reserved(&name) { + return Err(HeaderParseError::Other { + message: format!("header '{name}' is reserved and cannot be set by a connector"), + node: name_node, + }); + } + + let from = mappings + .iter() + .find(|(name, _value)| *name == HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME); + let value = mappings + .iter() + .find(|(name, _value)| *name == HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME); + + match (from, value) { + (Some(_), None) if Self::is_static(&name) => { + Err(HeaderParseError::Other{ message: format!( + "header '{name}' can't be set with `{HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME}`, only with `{HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME}`" + ), node: name_node}) + } + (Some((_, from_node)), None) => { + from_node.as_str() + .ok_or_else(|| format!("`{HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME}` is not a string")) + .and_then(|from_str| { + HeaderName::try_from(from_str).map_err(|_| { + format!("the value `{from_str}` is an invalid HTTP header name") + }) + }) + .map(|from| Self { + name, + name_node, + source: HeaderSource::From(from), + source_node: from_node, + }) + .map_err(|message| HeaderParseError::Other{ message, node: from_node}) 
+ } + (None, Some((_, value_node))) => { + value_node + .as_str() + .ok_or_else(|| HeaderParseError::Other{ message: format!("`{HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME}` field in HTTP header mapping must be a string"), node: value_node}) + .and_then(|value_str| { + value_str + .parse::() + .map_err(|err| HeaderParseError::ValueError {err, node: value_node}) + }) + .map(|value| Self { + name, + name_node, + source: HeaderSource::Value(value), + source_node: value_node, + }) + } + (None, None) => { + Err(HeaderParseError::Other { + message: format!("either `{HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME}` or `{HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME}` must be set"), + node, + }) + }, + (Some((from_name, _)), Some((value_name, _))) => { + Err(HeaderParseError::ConflictingArguments { + message: format!("`{HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME}` and `{HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME}` can't be set at the same time"), + from_location: from_name.location(), + value_location: value_name.location(), + }) + } + } + } + + /// These headers are not allowed to be defined by connect directives at all. + /// Copied from Router's plugins::headers + /// Headers from https://datatracker.ietf.org/doc/html/rfc2616#section-13.5.1 + /// These are not propagated by default using a regex match as they will not make sense for the + /// second hop. + /// In addition, because our requests are not regular proxy requests content-type, content-length + /// and host are also in the exclude list. 
+ fn is_reserved(header_name: &HeaderName) -> bool { + static KEEP_ALIVE: HeaderName = HeaderName::from_static("keep-alive"); + matches!( + *header_name, + header::CONNECTION + | header::PROXY_AUTHENTICATE + | header::PROXY_AUTHORIZATION + | header::TE + | header::TRAILER + | header::TRANSFER_ENCODING + | header::UPGRADE + | header::CONTENT_LENGTH + | header::CONTENT_ENCODING + | header::HOST + | header::ACCEPT_ENCODING + ) || header_name == KEEP_ALIVE + } + + /// These headers can be defined as static values in connect directives, but can't be + /// forwarded by the user. + fn is_static(header_name: &HeaderName) -> bool { + matches!(*header_name, header::CONTENT_TYPE | header::ACCEPT,) + } +} + +#[derive(Debug)] +pub(crate) enum HeaderParseError<'a> { + ValueError { + err: string_template::Error, + node: &'a Node, + }, + /// Both `value` and `from` are set + ConflictingArguments { + message: String, + from_location: Option, + value_location: Option, + }, + Other { + message: String, + node: &'a Node, + }, +} + +impl Display for HeaderParseError<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + Self::ConflictingArguments { message, .. } | Self::Other { message, .. } => { + write!(f, "{}", message) + } + Self::ValueError { err, .. 
} => write!(f, "{err}"), + } + } +} + +impl Error for HeaderParseError<'_> {} + +#[cfg(test)] +mod tests { + use apollo_compiler::Schema; + use insta::assert_debug_snapshot; + + use super::*; + use crate::schema::FederationSchema; + use crate::supergraph::extract_subgraphs_from_supergraph; + use crate::ValidFederationSubgraphs; + + static SIMPLE_SUPERGRAPH: &str = include_str!("./tests/schemas/simple.graphql"); + + fn get_subgraphs(supergraph_sdl: &str) -> ValidFederationSubgraphs { + let schema = Schema::parse(supergraph_sdl, "supergraph.graphql").unwrap(); + let supergraph_schema = FederationSchema::new(schema).unwrap(); + extract_subgraphs_from_supergraph(&supergraph_schema, Some(true)).unwrap() + } + + #[test] + fn test_from_schema() { + let subgraphs = get_subgraphs(SIMPLE_SUPERGRAPH); + let subgraph = subgraphs.get("connectors").unwrap(); + let connectors = + Connector::from_schema(subgraph.schema.schema(), "connectors", ConnectSpec::V0_1) + .unwrap(); + assert_debug_snapshot!(&connectors, @r###" + { + ConnectId { + label: "connectors.json http: GET /users", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + }: Connector { + id: ConnectId { + label: "connectors.json http: GET /users", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + 
location: 1..6, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: { + "authtoken": From( + "x-auth-token", + ), + "user-agent": Value( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "Firefox", + location: 0..7, + }, + ), + ], + }, + ), + ), + }, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + ], + range: Some( + 0..7, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + ConnectId { + label: "connectors.json http: GET /posts", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.posts), + directive_name: "connect", + directive_index: 0, + }, + }: Connector { + id: ConnectId { + label: "connectors.json http: GET /posts", + subgraph_name: "connectors", + source_name: Some( + "json", + ), + directive: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.posts), + directive_name: "connect", + directive_index: 0, + }, + }, + transport: HttpJsonTransport { + source_url: Some( + Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + connect_template: URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "posts", + location: 1..6, + }, + ), + ], + }, + ], + query: [], + }, + method: Get, + headers: { + "authtoken": From( + "x-auth-token", + ), + "user-agent": Value( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "Firefox", + location: 0..7, + }, + 
), + ], + }, + ), + ), + }, + body: None, + }, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "title", + ), + range: Some( + 3..8, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "body", + ), + range: Some( + 9..13, + ), + }, + None, + ), + ], + range: Some( + 0..13, + ), + }, + ), + config: None, + max_requests: None, + entity_resolver: None, + spec: V0_1, + request_variables: {}, + response_variables: {}, + }, + } + "###); + } +} diff --git a/apollo-federation/src/sources/connect/models/keys.rs b/apollo-federation/src/sources/connect/models/keys.rs new file mode 100644 index 0000000000..f063efdc0f --- /dev/null +++ b/apollo-federation/src/sources/connect/models/keys.rs @@ -0,0 +1,123 @@ +use std::fmt; +use std::fmt::Display; +use std::fmt::Formatter; + +use apollo_compiler::collections::IndexMap; +use apollo_compiler::executable::FieldSet; +use apollo_compiler::validation::Valid; +use apollo_compiler::validation::WithErrors; +use apollo_compiler::Name; +use apollo_compiler::Schema; +use itertools::Itertools; + +use super::VariableReference; +use crate::sources::connect::EntityResolver; +use crate::sources::connect::Namespace; + +/// Given the variables relevant to entity fetching, synthesize a FieldSet +/// appropriate for use in a @key directive. +pub(crate) fn make_key_field_set_from_variables<'a>( + schema: &Schema, + object_type_name: &Name, + variables: impl Iterator>, + resolver: EntityResolver, +) -> Result>, WithErrors
> { + // TODO: does this work with subselections like $this { something }? + let params = variables + .filter(|var| match resolver { + EntityResolver::Explicit => var.namespace.namespace == Namespace::Args, + EntityResolver::Implicit => var.namespace.namespace == Namespace::This, + }) + .unique() + .collect_vec(); + + if params.is_empty() { + return Ok(None); + } + + let mut merged = TrieNode::default(); + for param in params { + merged.insert(¶m.path.iter().map(|p| p.as_str()).collect::>()); + } + + FieldSet::parse_and_validate( + Valid::assume_valid_ref(schema), + object_type_name.clone(), + merged.to_string(), + "", + ) + .map(Some) +} + +#[derive(Default)] +struct TrieNode(IndexMap); + +impl TrieNode { + fn insert(&mut self, path: &[&str]) { + let mut node = self; + for head in path { + node = node.0.entry(head.to_string()).or_default(); + } + } +} + +impl Display for TrieNode { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + for (i, (key, node)) in self.0.iter().enumerate() { + write!(f, "{}", key)?; + if !node.0.is_empty() { + write!(f, " {{ {} }}", node)?; + } + if i != self.0.len() - 1 { + write!(f, " ")?; + } + } + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::name; + use apollo_compiler::Schema; + + use super::make_key_field_set_from_variables; + use super::TrieNode; + use crate::sources::connect::PathSelection; + + #[test] + fn test_trie() { + let mut trie = TrieNode::default(); + trie.insert(&["a", "b", "c"]); + trie.insert(&["a", "b", "d"]); + trie.insert(&["a", "b", "e"]); + trie.insert(&["a", "c"]); + trie.insert(&["a", "d"]); + trie.insert(&["b"]); + assert_eq!(trie.to_string(), "a { b { c d e } c d } b"); + } + + #[test] + fn test_make_key_field_set_from_variables() { + let result = make_key_field_set_from_variables( + &Schema::parse_and_validate("type Query { t: T } type T { a: A b: ID } type A { b: B c: ID d: ID } type B { c: ID d: ID e: ID }", "").unwrap(), + &name!("T"), + vec![ + 
PathSelection::parse("$args.a.b.c".into()).unwrap().1.variable_reference().unwrap(), + PathSelection::parse("$args.a.b.d".into()).unwrap().1.variable_reference().unwrap(), + PathSelection::parse("$args.a.b.e".into()).unwrap().1.variable_reference().unwrap(), + PathSelection::parse("$args.a.c".into()).unwrap().1.variable_reference().unwrap(), + PathSelection::parse("$args.a.d".into()).unwrap().1.variable_reference().unwrap(), + PathSelection::parse("$args.b".into()).unwrap().1.variable_reference().unwrap(), + ].into_iter(), + super::EntityResolver::Explicit, + ) + .unwrap() + .unwrap(); + + assert_eq!( + result.serialize().no_indent().to_string(), + "a { b { c d e } c d } b" + ); + } +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__expressions_with_nested_braces.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__expressions_with_nested_braces.snap new file mode 100644 index 0000000000..d2d303a4a3 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__expressions_with_nested_braces.snap @@ -0,0 +1,103 @@ +--- +source: apollo-federation/src/sources/connect/string_template.rs +expression: "StringTemplate::::parse(\"const{$config.one { two { three } }}another-const\",\n0).unwrap()" +--- +StringTemplate { + parts: [ + Constant( + Constant { + value: "const", + location: 0..5, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "two", + ), + range: Some( + 14..17, + ), + }, + Some( + SubSelection { + selections: 
[ + Field( + None, + WithRange { + node: Field( + "three", + ), + range: Some( + 20..25, + ), + }, + None, + ), + ], + range: Some( + 18..27, + ), + }, + ), + ), + ], + range: Some( + 12..29, + ), + }, + ), + range: Some( + 12..29, + ), + }, + ), + range: Some( + 7..29, + ), + }, + ), + range: Some( + 0..29, + ), + }, + }, + ), + location: 6..35, + }, + ), + Constant( + Constant { + value: "another-const", + location: 36..49, + }, + ), + ], +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__mixed_constant_and_expression.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__mixed_constant_and_expression.snap new file mode 100644 index 0000000000..a55abd8940 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__mixed_constant_and_expression.snap @@ -0,0 +1,63 @@ +--- +source: apollo-federation/src/sources/connect/string_template.rs +expression: "StringTemplate::::parse(\"text{$config.one}text\", 0).unwrap()" +--- +StringTemplate { + parts: [ + Constant( + Constant { + value: "text", + location: 0..4, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Empty, + range: Some( + 11..11, + ), + }, + ), + range: Some( + 7..11, + ), + }, + ), + range: Some( + 0..11, + ), + }, + }, + ), + location: 5..16, + }, + ), + Constant( + Constant { + value: "text", + location: 17..21, + }, + ), + ], +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__offset.snap 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__offset.snap new file mode 100644 index 0000000000..c3d863376f --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__offset.snap @@ -0,0 +1,63 @@ +--- +source: apollo-federation/src/sources/connect/string_template.rs +expression: "StringTemplate::::parse(\"text{$config.one}text\", 9).unwrap()" +--- +StringTemplate { + parts: [ + Constant( + Constant { + value: "text", + location: 9..13, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Empty, + range: Some( + 11..11, + ), + }, + ), + range: Some( + 7..11, + ), + }, + ), + range: Some( + 0..11, + ), + }, + }, + ), + location: 14..25, + }, + ), + Constant( + Constant { + value: "text", + location: 26..30, + }, + ), + ], +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_constant.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_constant.snap new file mode 100644 index 0000000000..1c4fdc72ac --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_constant.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/string_template.rs +expression: template +--- +StringTemplate { + parts: [ + Constant( + Constant { + value: "text", + location: 0..4, + }, + ), + ], +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_expression.snap 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_expression.snap new file mode 100644 index 0000000000..3ad221d40d --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__string_template__test_parse__simple_expression.snap @@ -0,0 +1,51 @@ +--- +source: apollo-federation/src/sources/connect/string_template.rs +expression: "StringTemplate::::parse(\"{$config.one}\", 0).unwrap()" +--- +StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "one", + ), + range: Some( + 8..11, + ), + }, + WithRange { + node: Empty, + range: Some( + 11..11, + ), + }, + ), + range: Some( + 7..11, + ), + }, + ), + range: Some( + 0..11, + ), + }, + }, + ), + location: 1..12, + }, + ), + ], +} diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path.snap new file mode 100644 index 0000000000..031473d297 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path.snap @@ -0,0 +1,48 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"http://example.com/abc/def\")" +--- +Ok( + URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 19..22, 
+ }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "def", + location: 23..26, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path_variable.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path_variable.snap new file mode 100644 index 0000000000..a74d525dcb --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_path_variable.snap @@ -0,0 +1,85 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"http://example.com/{$args.abc}/def\")" +--- +Ok( + URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [ + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "abc", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 20..29, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "def", + location: 31..34, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query.snap 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query.snap new file mode 100644 index 0000000000..86a7faf164 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query.snap @@ -0,0 +1,50 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"http://example.com?abc=def\")" +--- +Ok( + URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [], + query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 19..22, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "def", + location: 23..26, + }, + ), + ], + }, + ), + ], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query_variable.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query_variable.snap new file mode 100644 index 0000000000..25dd23d2e3 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__absolute_url_with_query_variable.snap @@ -0,0 +1,87 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"http://example.com?abc={$args.abc}\")" +--- +Ok( + URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [], + query: [ + ( + 
StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 19..22, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "abc", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 24..33, + }, + ), + ], + }, + ), + ], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__basic_absolute_url.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__basic_absolute_url.snap new file mode 100644 index 0000000000..b7e27d5b7b --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__basic_absolute_url.snap @@ -0,0 +1,27 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"http://example.com\")" +--- +Ok( + URLTemplate { + base: Some( + Url { + scheme: "http", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + "example.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + ), + path: [], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__expression_missing_closing_bracket.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__expression_missing_closing_bracket.snap new file mode 100644 index 0000000000..b82a29b7ed --- /dev/null +++ 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__expression_missing_closing_bracket.snap @@ -0,0 +1,10 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"{$this { x: { y } }\")" +--- +Err( + Error { + message: "Invalid expression, missing closing }", + location: 0..19, + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__nested_braces_in_expression.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__nested_braces_in_expression.snap new file mode 100644 index 0000000000..c4b61cf44b --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__nested_braces_in_expression.snap @@ -0,0 +1,104 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/position/xz/{$this { x { y } } }\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "position", + location: 1..9, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "xz", + location: 10..12, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Selection( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "x", + ), + range: Some( + 8..9, + ), + }, + Some( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "y", + ), + range: Some( + 12..13, + ), + }, + None, + ), + ], + range: Some( + 10..15, + ), + }, + ), + ), + ], + range: Some( + 6..17, + ), + }, + ), + range: Some( + 6..17, + ), + }, + ), + range: Some( + 
0..17, + ), + }, + }, + ), + location: 14..32, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-2.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-2.snap new file mode 100644 index 0000000000..8e160a560b --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-2.snap @@ -0,0 +1,32 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/abc/def\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 1..4, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "def", + location: 5..8, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-3.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-3.snap new file mode 100644 index 0000000000..1cf75e6ba9 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-3.snap @@ -0,0 +1,69 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/abc/{$args.def}\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 1..4, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + 
node: Field( + "def", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 6..15, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-4.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-4.snap new file mode 100644 index 0000000000..60f6f1e377 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list-4.snap @@ -0,0 +1,94 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/abc/{$this.def.thing}/ghi\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 1..4, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "def", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "thing", + ), + range: Some( + 10..15, + ), + }, + WithRange { + node: Empty, + range: Some( + 15..15, + ), + }, + ), + range: Some( + 9..15, + ), + }, + ), + range: Some( + 5..15, + ), + }, + ), + range: Some( + 0..15, + ), + }, + }, + ), + location: 6..21, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "ghi", + location: 23..26, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list.snap 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list.snap new file mode 100644 index 0000000000..5dc5defe31 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__path_list.snap @@ -0,0 +1,22 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/abc\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "abc", + location: 1..4, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-2.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-2.snap new file mode 100644 index 0000000000..8e686adec5 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-2.snap @@ -0,0 +1,262 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/users/{$this.user_id}?a={$args.b}&c={$args.d}&e={$args.f.g}\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "user_id", + ), + range: Some( + 6..13, + ), + }, + WithRange { + node: Empty, + range: Some( + 13..13, + ), + }, + ), + range: Some( + 5..13, + ), + }, + ), + range: Some( + 0..13, + ), + }, + }, + ), + location: 8..21, + }, + ), + ], + }, + ], + 
query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "a", + location: 23..24, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "b", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Empty, + range: Some( + 7..7, + ), + }, + ), + range: Some( + 5..7, + ), + }, + ), + range: Some( + 0..7, + ), + }, + }, + ), + location: 26..33, + }, + ), + ], + }, + ), + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "c", + location: 35..36, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "d", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Empty, + range: Some( + 7..7, + ), + }, + ), + range: Some( + 5..7, + ), + }, + ), + range: Some( + 0..7, + ), + }, + }, + ), + location: 38..45, + }, + ), + ], + }, + ), + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "e", + location: 47..48, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "f", + ), + range: Some( + 6..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "g", + ), + range: Some( + 8..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 7..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 50..59, + }, + ), + ], + }, + ), + ], + }, +) diff --git 
a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-3.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-3.snap new file mode 100644 index 0000000000..f8638beac0 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-3.snap @@ -0,0 +1,135 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/users/{$this.id}?a={$config.b}#junk\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "id", + ), + range: Some( + 6..8, + ), + }, + WithRange { + node: Empty, + range: Some( + 8..8, + ), + }, + ), + range: Some( + 5..8, + ), + }, + ), + range: Some( + 0..8, + ), + }, + }, + ), + location: 8..16, + }, + ), + ], + }, + ], + query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "a", + location: 18..19, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "b", + ), + range: Some( + 8..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 7..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 21..30, + }, + ), + Constant( + Constant { + value: "#junk", + location: 31..36, + }, + ), + ], + }, + ), + 
], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-4.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-4.snap new file mode 100644 index 0000000000..c537ad86a9 --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse-4.snap @@ -0,0 +1,118 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/location/{$this.lat},{$this.lon}\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "location", + location: 1..9, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "lat", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 11..20, + }, + ), + Constant( + Constant { + value: ",", + location: 21..22, + }, + ), + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $this, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "lon", + ), + range: Some( + 6..9, + ), + }, + WithRange { + node: Empty, + range: Some( + 9..9, + ), + }, + ), + range: Some( + 5..9, + ), + }, + ), + range: Some( + 0..9, + ), + }, + }, + ), + location: 23..32, + }, + ), + ], + }, + ], + query: [], + }, +) diff --git 
a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse.snap new file mode 100644 index 0000000000..419f8ddf2d --- /dev/null +++ b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__url_path_template_parse.snap @@ -0,0 +1,92 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"/users/{$config.user_id}?a=b\")" +--- +Ok( + URLTemplate { + base: None, + path: [ + StringTemplate { + parts: [ + Constant( + Constant { + value: "users", + location: 1..6, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $config, + range: Some( + 0..7, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "user_id", + ), + range: Some( + 8..15, + ), + }, + WithRange { + node: Empty, + range: Some( + 15..15, + ), + }, + ), + range: Some( + 7..15, + ), + }, + ), + range: Some( + 0..15, + ), + }, + }, + ), + location: 8..23, + }, + ), + ], + }, + ], + query: [ + ( + StringTemplate { + parts: [ + Constant( + Constant { + value: "a", + location: 25..26, + }, + ), + ], + }, + StringTemplate { + parts: [ + Constant( + Constant { + value: "b", + location: 27..28, + }, + ), + ], + }, + ), + ], + }, +) diff --git a/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__variable_param_key.snap b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__variable_param_key.snap new file mode 100644 index 0000000000..958e7e9c50 --- /dev/null +++ 
b/apollo-federation/src/sources/connect/snapshots/apollo_federation__sources__connect__url_template__test_parse__variable_param_key.snap @@ -0,0 +1,138 @@ +--- +source: apollo-federation/src/sources/connect/url_template.rs +expression: "URLTemplate::from_str(\"?{$args.filter.field}={$args.filter.value}\")" +--- +Ok( + URLTemplate { + base: None, + path: [], + query: [ + ( + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "filter", + ), + range: Some( + 6..12, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "field", + ), + range: Some( + 13..18, + ), + }, + WithRange { + node: Empty, + range: Some( + 18..18, + ), + }, + ), + range: Some( + 12..18, + ), + }, + ), + range: Some( + 5..18, + ), + }, + ), + range: Some( + 0..18, + ), + }, + }, + ), + location: 2..20, + }, + ), + ], + }, + StringTemplate { + parts: [ + Expression( + Expression { + expression: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $args, + range: Some( + 0..5, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "filter", + ), + range: Some( + 6..12, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "value", + ), + range: Some( + 13..18, + ), + }, + WithRange { + node: Empty, + range: Some( + 18..18, + ), + }, + ), + range: Some( + 12..18, + ), + }, + ), + range: Some( + 5..18, + ), + }, + ), + range: Some( + 0..18, + ), + }, + }, + ), + location: 23..41, + }, + ), + ], + }, + ), + ], + }, +) diff --git a/apollo-federation/src/sources/connect/spec/directives.rs b/apollo-federation/src/sources/connect/spec/directives.rs new file mode 100644 index 0000000000..847659a90e --- /dev/null +++ b/apollo-federation/src/sources/connect/spec/directives.rs @@ -0,0 +1,615 @@ +use apollo_compiler::ast::Directive; +use 
apollo_compiler::ast::Value; +use apollo_compiler::schema::Component; +use apollo_compiler::Name; +use apollo_compiler::Node; +use apollo_compiler::Schema; +use itertools::Itertools; + +use super::schema::ConnectDirectiveArguments; +use super::schema::ConnectHTTPArguments; +use super::schema::SourceDirectiveArguments; +use super::schema::SourceHTTPArguments; +use super::schema::CONNECT_BODY_ARGUMENT_NAME; +use super::schema::CONNECT_ENTITY_ARGUMENT_NAME; +use super::schema::CONNECT_SELECTION_ARGUMENT_NAME; +use super::schema::HEADERS_ARGUMENT_NAME; +use super::schema::HTTP_ARGUMENT_NAME; +use super::schema::SOURCE_BASE_URL_ARGUMENT_NAME; +use super::schema::SOURCE_NAME_ARGUMENT_NAME; +use crate::error::FederationError; +use crate::schema::position::InterfaceFieldDefinitionPosition; +use crate::schema::position::ObjectOrInterfaceFieldDefinitionPosition; +use crate::schema::position::ObjectOrInterfaceFieldDirectivePosition; +use crate::sources::connect::json_selection::JSONSelection; +use crate::sources::connect::models::Header; +use crate::sources::connect::spec::schema::CONNECT_SOURCE_ARGUMENT_NAME; +use crate::sources::connect::ObjectFieldDefinitionPosition; + +macro_rules! 
internal { + ($s:expr) => { + FederationError::internal($s) + }; +} + +pub(crate) fn extract_source_directive_arguments( + schema: &Schema, + name: &Name, +) -> Result, FederationError> { + schema + .schema_definition + .directives + .iter() + .filter(|directive| directive.name == *name) + .map(SourceDirectiveArguments::try_from) + .collect() +} + +pub(crate) fn extract_connect_directive_arguments( + schema: &Schema, + name: &Name, +) -> Result, FederationError> { + schema + .types + .iter() + .filter_map(|(name, ty)| match ty { + apollo_compiler::schema::ExtendedType::Object(node) => { + Some((name, &node.fields, /* is_interface */ false)) + } + apollo_compiler::schema::ExtendedType::Interface(node) => { + Some((name, &node.fields, /* is_interface */ true)) + } + _ => None, + }) + .flat_map(|(type_name, fields, is_interface)| { + fields.iter().flat_map(move |(field_name, field_def)| { + field_def + .directives + .iter() + .enumerate() + .filter(|(_, directive)| directive.name == *name) + .map(move |(i, directive)| { + let field_pos = if is_interface { + ObjectOrInterfaceFieldDefinitionPosition::Interface( + InterfaceFieldDefinitionPosition { + type_name: type_name.clone(), + field_name: field_name.clone(), + }, + ) + } else { + ObjectOrInterfaceFieldDefinitionPosition::Object( + ObjectFieldDefinitionPosition { + type_name: type_name.clone(), + field_name: field_name.clone(), + }, + ) + }; + + let position = ObjectOrInterfaceFieldDirectivePosition { + field: field_pos, + directive_name: directive.name.clone(), + directive_index: i, + }; + ConnectDirectiveArguments::from_position_and_directive(position, directive) + }) + }) + }) + .collect() +} + +/// Internal representation of the object type pairs +type ObjectNode = [(Name, Node)]; + +impl TryFrom<&Component> for SourceDirectiveArguments { + type Error = FederationError; + + fn try_from(value: &Component) -> Result { + let args = &value.arguments; + + // We'll have to iterate over the arg list and keep the 
properties by their name + let mut name = None; + let mut http = None; + for arg in args { + let arg_name = arg.name.as_str(); + + if arg_name == SOURCE_NAME_ARGUMENT_NAME.as_str() { + name = Some(arg.value.as_str().ok_or(internal!( + "`name` field in `@source` directive is not a string" + ))?); + } else if arg_name == HTTP_ARGUMENT_NAME.as_str() { + let http_value = arg.value.as_object().ok_or(internal!( + "`http` field in `@source` directive is not an object" + ))?; + let http_value = SourceHTTPArguments::try_from(http_value)?; + + http = Some(http_value); + } else { + return Err(internal!(format!( + "unknown argument in `@source` directive: {arg_name}" + ))); + } + } + + Ok(Self { + name: name + .ok_or(internal!("missing `name` field in `@source` directive"))? + .to_string(), + http: http.ok_or(internal!("missing `http` field in `@source` directive"))?, + }) + } +} + +impl TryFrom<&ObjectNode> for SourceHTTPArguments { + type Error = FederationError; + + fn try_from(values: &ObjectNode) -> Result { + let mut base_url = None; + let mut headers = None; + for (name, value) in values { + let name = name.as_str(); + + if name == SOURCE_BASE_URL_ARGUMENT_NAME.as_str() { + let base_url_value = value.as_str().ok_or(internal!( + "`baseURL` field in `@source` directive's `http` field is not a string" + ))?; + + base_url = Some( + base_url_value + .parse() + .map_err(|err| internal!(format!("Invalid base URL: {}", err)))?, + ); + } else if name == HEADERS_ARGUMENT_NAME.as_str() { + headers = Some( + Header::from_headers_arg(value) + .into_iter() + .map_ok(|Header { name, source, .. 
}| (name, source)) + .try_collect() + .map_err(|err| internal!(err.to_string()))?, + ); + } else { + return Err(internal!(format!( + "unknown argument in `@source` directive's `http` field: {name}" + ))); + } + } + + Ok(Self { + base_url: base_url.ok_or(internal!( + "missing `base_url` field in `@source` directive's `http` argument" + ))?, + headers: headers.unwrap_or_default(), + }) + } +} + +impl ConnectDirectiveArguments { + fn from_position_and_directive( + position: ObjectOrInterfaceFieldDirectivePosition, + value: &Node, + ) -> Result { + let args = &value.arguments; + + // We'll have to iterate over the arg list and keep the properties by their name + let mut source = None; + let mut http = None; + let mut selection = None; + let mut entity = None; + for arg in args { + let arg_name = arg.name.as_str(); + + if arg_name == CONNECT_SOURCE_ARGUMENT_NAME.as_str() { + let source_value = arg.value.as_str().ok_or(internal!( + "`source` field in `@source` directive is not a string" + ))?; + + source = Some(source_value); + } else if arg_name == HTTP_ARGUMENT_NAME.as_str() { + let http_value = arg.value.as_object().ok_or(internal!( + "`http` field in `@connect` directive is not an object" + ))?; + + http = Some(ConnectHTTPArguments::try_from(http_value)?); + } else if arg_name == CONNECT_SELECTION_ARGUMENT_NAME.as_str() { + let selection_value = arg.value.as_str().ok_or(internal!( + "`selection` field in `@connect` directive is not a string" + ))?; + selection = + Some(JSONSelection::parse(selection_value).map_err(|e| internal!(e.message))?); + } else if arg_name == CONNECT_ENTITY_ARGUMENT_NAME.as_str() { + let entity_value = arg.value.to_bool().ok_or(internal!( + "`entity` field in `@connect` directive is not a boolean" + ))?; + + entity = Some(entity_value); + } else { + return Err(internal!(format!( + "unknown argument in `@connect` directive: {arg_name}" + ))); + } + } + + Ok(Self { + position, + source: source.map(|s| s.to_string()), + http, + selection: 
selection.ok_or(internal!("`@connect` directive is missing a selection"))?, + entity: entity.unwrap_or_default(), + }) + } +} + +impl TryFrom<&ObjectNode> for ConnectHTTPArguments { + type Error = FederationError; + + fn try_from(values: &ObjectNode) -> Result { + let mut get = None; + let mut post = None; + let mut patch = None; + let mut put = None; + let mut delete = None; + let mut body = None; + let mut headers = None; + for (name, value) in values { + let name = name.as_str(); + + if name == CONNECT_BODY_ARGUMENT_NAME.as_str() { + let body_value = value.as_str().ok_or(internal!( + "`body` field in `@connect` directive's `http` field is not a string" + ))?; + body = Some(JSONSelection::parse(body_value).map_err(|e| internal!(e.message))?); + } else if name == HEADERS_ARGUMENT_NAME.as_str() { + headers = Some( + Header::from_headers_arg(value) + .into_iter() + .map_ok(|Header { name, source, .. }| (name, source)) + .try_collect() + .map_err(|err| internal!(err.to_string()))?, + ); + } else if name == "GET" { + get = Some(value.as_str().ok_or(internal!( + "supplied HTTP template URL in `@connect` directive's `http` field is not a string" + ))?.to_string()); + } else if name == "POST" { + post = Some(value.as_str().ok_or(internal!( + "supplied HTTP template URL in `@connect` directive's `http` field is not a string" + ))?.to_string()); + } else if name == "PATCH" { + patch = Some(value.as_str().ok_or(internal!( + "supplied HTTP template URL in `@connect` directive's `http` field is not a string" + ))?.to_string()); + } else if name == "PUT" { + put = Some(value.as_str().ok_or(internal!( + "supplied HTTP template URL in `@connect` directive's `http` field is not a string" + ))?.to_string()); + } else if name == "DELETE" { + delete = Some(value.as_str().ok_or(internal!( + "supplied HTTP template URL in `@connect` directive's `http` field is not a string" + ))?.to_string()); + } + } + + Ok(Self { + get, + post, + patch, + put, + delete, + body, + headers: 
headers.unwrap_or_default(), + }) + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::name; + use apollo_compiler::Schema; + + use crate::schema::FederationSchema; + use crate::sources::connect::spec::schema::SourceDirectiveArguments; + use crate::sources::connect::spec::schema::CONNECT_DIRECTIVE_NAME_IN_SPEC; + use crate::sources::connect::spec::schema::SOURCE_DIRECTIVE_NAME_IN_SPEC; + use crate::supergraph::extract_subgraphs_from_supergraph; + use crate::ValidFederationSubgraphs; + + static SIMPLE_SUPERGRAPH: &str = include_str!("../tests/schemas/simple.graphql"); + + fn get_subgraphs(supergraph_sdl: &str) -> ValidFederationSubgraphs { + let schema = Schema::parse(supergraph_sdl, "supergraph.graphql").unwrap(); + let supergraph_schema = FederationSchema::new(schema).unwrap(); + extract_subgraphs_from_supergraph(&supergraph_schema, Some(true)).unwrap() + } + + #[test] + fn it_parses_at_source() { + let subgraphs = get_subgraphs(SIMPLE_SUPERGRAPH); + let subgraph = subgraphs.get("connectors").unwrap(); + + let actual_definition = subgraph + .schema + .get_directive_definition(&SOURCE_DIRECTIVE_NAME_IN_SPEC) + .unwrap() + .get(subgraph.schema.schema()) + .unwrap(); + + insta::assert_snapshot!(actual_definition.to_string(), @"directive @source(name: String!, http: connect__SourceHTTP) repeatable on SCHEMA"); + + insta::assert_debug_snapshot!( + subgraph.schema + .referencers() + .get_directive(SOURCE_DIRECTIVE_NAME_IN_SPEC.as_str()) + .unwrap(), + @r###" + DirectiveReferencers { + schema: Some( + SchemaDefinitionPosition, + ), + scalar_types: {}, + object_types: {}, + object_fields: {}, + object_field_arguments: {}, + interface_types: {}, + interface_fields: {}, + interface_field_arguments: {}, + union_types: {}, + enum_types: {}, + enum_values: {}, + input_object_types: {}, + input_object_fields: {}, + directive_arguments: {}, + } + "### + ); + } + + #[test] + fn it_parses_at_connect() { + let subgraphs = get_subgraphs(SIMPLE_SUPERGRAPH); + let subgraph = 
subgraphs.get("connectors").unwrap(); + let schema = &subgraph.schema; + + let actual_definition = schema + .get_directive_definition(&CONNECT_DIRECTIVE_NAME_IN_SPEC) + .unwrap() + .get(schema.schema()) + .unwrap(); + + insta::assert_snapshot!( + actual_definition.to_string(), + @"directive @connect(source: String, http: connect__ConnectHTTP, selection: connect__JSONSelection!, entity: Boolean = false) repeatable on FIELD_DEFINITION" + ); + + let fields = schema + .referencers() + .get_directive(CONNECT_DIRECTIVE_NAME_IN_SPEC.as_str()) + .unwrap() + .object_fields + .iter() + .map(|f| f.get(schema.schema()).unwrap().to_string()) + .collect::>() + .join("\n"); + + insta::assert_snapshot!( + fields, + @r###" + users: [User] @connect(source: "json", http: {GET: "/users"}, selection: "id name") + posts: [Post] @connect(source: "json", http: {GET: "/posts"}, selection: "id title body") + "### + ); + } + + #[test] + fn it_extracts_at_source() { + let subgraphs = get_subgraphs(SIMPLE_SUPERGRAPH); + let subgraph = subgraphs.get("connectors").unwrap(); + let schema = &subgraph.schema; + + // Try to extract the source information from the valid schema + // TODO: This should probably be handled by the rest of the stack + let sources = schema + .referencers() + .get_directive(&SOURCE_DIRECTIVE_NAME_IN_SPEC) + .unwrap(); + + // Extract the sources from the schema definition and map them to their `Source` equivalent + let schema_directive_refs = sources.schema.as_ref().unwrap(); + let sources: Result, _> = schema_directive_refs + .get(schema.schema()) + .directives + .iter() + .filter(|directive| directive.name == SOURCE_DIRECTIVE_NAME_IN_SPEC) + .map(SourceDirectiveArguments::try_from) + .collect(); + + insta::assert_debug_snapshot!( + sources.unwrap(), + @r###" + [ + SourceDirectiveArguments { + name: "json", + http: SourceHTTPArguments { + base_url: Url { + scheme: "https", + cannot_be_a_base: false, + username: "", + password: None, + host: Some( + Domain( + 
"jsonplaceholder.typicode.com", + ), + ), + port: None, + path: "/", + query: None, + fragment: None, + }, + headers: { + "authtoken": From( + "x-auth-token", + ), + "user-agent": Value( + HeaderValue( + StringTemplate { + parts: [ + Constant( + Constant { + value: "Firefox", + location: 0..7, + }, + ), + ], + }, + ), + ), + }, + }, + }, + ] + "### + ); + } + + #[test] + fn it_extracts_at_connect() { + let subgraphs = get_subgraphs(SIMPLE_SUPERGRAPH); + let subgraph = subgraphs.get("connectors").unwrap(); + let schema = &subgraph.schema; + + // Extract the connects from the schema definition and map them to their `Connect` equivalent + let connects = super::extract_connect_directive_arguments(schema.schema(), &name!(connect)); + + insta::assert_debug_snapshot!( + connects.unwrap(), + @r###" + [ + ConnectDirectiveArguments { + position: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.users), + directive_name: "connect", + directive_index: 0, + }, + source: Some( + "json", + ), + http: Some( + ConnectHTTPArguments { + get: Some( + "/users", + ), + post: None, + patch: None, + put: None, + delete: None, + body: None, + headers: {}, + }, + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "name", + ), + range: Some( + 3..7, + ), + }, + None, + ), + ], + range: Some( + 0..7, + ), + }, + ), + entity: false, + }, + ConnectDirectiveArguments { + position: ObjectOrInterfaceFieldDirectivePosition { + field: Object(Query.posts), + directive_name: "connect", + directive_index: 0, + }, + source: Some( + "json", + ), + http: Some( + ConnectHTTPArguments { + get: Some( + "/posts", + ), + post: None, + patch: None, + put: None, + delete: None, + body: None, + headers: {}, + }, + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "id", + ), + range: Some( + 
0..2, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "title", + ), + range: Some( + 3..8, + ), + }, + None, + ), + Field( + None, + WithRange { + node: Field( + "body", + ), + range: Some( + 9..13, + ), + }, + None, + ), + ], + range: Some( + 0..13, + ), + }, + ), + entity: false, + }, + ] + "### + ); + } +} diff --git a/apollo-federation/src/sources/connect/spec/mod.rs b/apollo-federation/src/sources/connect/spec/mod.rs new file mode 100644 index 0000000000..127e4f4edd --- /dev/null +++ b/apollo-federation/src/sources/connect/spec/mod.rs @@ -0,0 +1,171 @@ +// No panics allowed in this module +// The expansion code is called with potentially invalid schemas during the +// authoring process and we can't panic in the language server. +#![cfg_attr( + not(test), + deny( + clippy::exit, + clippy::panic, + clippy::unwrap_used, + clippy::expect_used, + clippy::indexing_slicing, + clippy::unimplemented, + clippy::todo + ) +)] + +mod directives; +pub(crate) mod schema; +mod type_and_directive_specifications; + +use std::fmt::Display; + +use apollo_compiler::ast::Argument; +use apollo_compiler::ast::Directive; +use apollo_compiler::ast::Value; +use apollo_compiler::name; +use apollo_compiler::Name; +use apollo_compiler::Schema; +pub(crate) use directives::extract_connect_directive_arguments; +pub(crate) use directives::extract_source_directive_arguments; +pub(crate) use schema::ConnectHTTPArguments; +pub(crate) use schema::SourceHTTPArguments; + +use self::schema::CONNECT_DIRECTIVE_NAME_IN_SPEC; +use self::schema::SOURCE_DIRECTIVE_NAME_IN_SPEC; +use crate::error::FederationError; +use crate::error::SingleFederationError; +use crate::link::spec::Identity; +use crate::link::spec::Url; +use crate::link::spec::Version; +use crate::link::spec::APOLLO_SPEC_DOMAIN; +use crate::link::Link; +use crate::schema::FederationSchema; + +/// The known versions of the connect spec +#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] +pub enum ConnectSpec { + V0_1, +} + 
+impl ConnectSpec { + pub const fn as_str(&self) -> &'static str { + match self { + Self::V0_1 => "0.1", + } + } + + const IDENTITY_NAME: Name = name!("connect"); + + pub(crate) fn from_directive(directive: &Directive) -> Result, FederationError> { + let Some(url) = directive + .specified_argument_by_name("url") + .and_then(|a| a.as_str()) + else { + return Ok(None); + }; + + let url: Url = url.parse()?; + Self::identity_matches(&url.identity) + .then(|| Self::try_from(&url.version)) + .transpose() + .map_err(FederationError::from) + } + + pub(crate) fn identity_matches(identity: &Identity) -> bool { + identity.domain == APOLLO_SPEC_DOMAIN && identity.name == Self::IDENTITY_NAME + } + + pub(crate) fn identity() -> Identity { + Identity { + domain: APOLLO_SPEC_DOMAIN.to_string(), + name: Self::IDENTITY_NAME, + } + } + + pub(crate) fn url(&self) -> Url { + Url { + identity: Self::identity(), + version: (*self).into(), + } + } + + pub(crate) fn get_from_schema(schema: &Schema) -> Option<(Self, Link)> { + let (link, _) = Link::for_identity(schema, &Self::identity())?; + Self::try_from(&link.url.version) + .ok() + .map(|spec| (spec, link)) + } + + pub(crate) fn check_or_add(schema: &mut FederationSchema) -> Result<(), FederationError> { + let Some(link) = schema + .metadata() + .and_then(|metadata| metadata.for_identity(&Self::identity())) + else { + return Ok(()); + }; + + type_and_directive_specifications::check_or_add(&link, schema) + } + + pub(crate) fn source_directive_name(link: &Link) -> Name { + link.directive_name_in_schema(&SOURCE_DIRECTIVE_NAME_IN_SPEC) + } + + pub(crate) fn connect_directive_name(link: &Link) -> Name { + link.directive_name_in_schema(&CONNECT_DIRECTIVE_NAME_IN_SPEC) + } + + pub(crate) fn join_directive_application(&self) -> Directive { + Directive { + name: name!(join__directive), + arguments: vec![ + Argument { + name: name!("graphs"), + value: Value::List(vec![]).into(), + } + .into(), + Argument { + name: name!("name"), + value: 
Value::String("link".to_string()).into(), + } + .into(), + Argument { + name: name!("args"), + value: Value::Object(vec![( + name!("url"), + Value::String(self.url().to_string()).into(), + )]) + .into(), + } + .into(), + ], + } + } +} + +impl TryFrom<&Version> for ConnectSpec { + type Error = SingleFederationError; + fn try_from(version: &Version) -> Result { + match (version.major, version.minor) { + (0, 1) => Ok(Self::V0_1), + _ => Err(SingleFederationError::UnknownLinkVersion { + message: format!("Unknown connect version: {version}"), + }), + } + } +} + +impl Display for ConnectSpec { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.write_str(self.as_str()) + } +} + +impl From for Version { + fn from(spec: ConnectSpec) -> Self { + match spec { + ConnectSpec::V0_1 => Version { major: 0, minor: 1 }, + } + } +} diff --git a/apollo-federation/src/sources/connect/spec/schema.rs b/apollo-federation/src/sources/connect/spec/schema.rs new file mode 100644 index 0000000000..8759b22a2b --- /dev/null +++ b/apollo-federation/src/sources/connect/spec/schema.rs @@ -0,0 +1,115 @@ +use apollo_compiler::collections::IndexMap; +use apollo_compiler::name; +use apollo_compiler::Name; +use http::HeaderName; +use url::Url; + +use crate::schema::position::ObjectOrInterfaceFieldDirectivePosition; +use crate::sources::connect::json_selection::JSONSelection; +use crate::sources::connect::HeaderSource; + +pub(crate) const CONNECT_DIRECTIVE_NAME_IN_SPEC: Name = name!("connect"); +pub(crate) const CONNECT_SOURCE_ARGUMENT_NAME: Name = name!("source"); +pub(crate) const CONNECT_HTTP_ARGUMENT_GET_METHOD_NAME: Name = name!("GET"); +pub(crate) const CONNECT_HTTP_ARGUMENT_POST_METHOD_NAME: Name = name!("POST"); +pub(crate) const CONNECT_HTTP_ARGUMENT_PUT_METHOD_NAME: Name = name!("PUT"); +pub(crate) const CONNECT_HTTP_ARGUMENT_PATCH_METHOD_NAME: Name = name!("PATCH"); +pub(crate) const CONNECT_HTTP_ARGUMENT_DELETE_METHOD_NAME: Name = name!("DELETE"); +pub(crate) const 
CONNECT_SELECTION_ARGUMENT_NAME: Name = name!("selection"); +pub(crate) const CONNECT_ENTITY_ARGUMENT_NAME: Name = name!("entity"); + +pub(crate) const CONNECT_HTTP_NAME_IN_SPEC: Name = name!("ConnectHTTP"); +pub(crate) const CONNECT_BODY_ARGUMENT_NAME: Name = name!("body"); + +pub(crate) const SOURCE_DIRECTIVE_NAME_IN_SPEC: Name = name!("source"); +pub(crate) const SOURCE_NAME_ARGUMENT_NAME: Name = name!("name"); + +pub(crate) const SOURCE_HTTP_NAME_IN_SPEC: Name = name!("SourceHTTP"); +pub(crate) const SOURCE_BASE_URL_ARGUMENT_NAME: Name = name!("baseURL"); +pub(crate) const HTTP_ARGUMENT_NAME: Name = name!("http"); +pub(crate) const HEADERS_ARGUMENT_NAME: Name = name!("headers"); + +pub(crate) const HTTP_HEADER_MAPPING_NAME_IN_SPEC: Name = name!("HTTPHeaderMapping"); +pub(crate) const HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME: Name = name!("name"); +pub(crate) const HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME: Name = name!("from"); +pub(crate) const HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME: Name = name!("value"); + +pub(crate) const JSON_SELECTION_SCALAR_NAME: Name = name!("JSONSelection"); +pub(crate) const URL_PATH_TEMPLATE_SCALAR_NAME: Name = name!("URLTemplate"); + +/// Arguments to the `@source` directive +/// +/// Refer to [SourceSpecDefinition] for more info. +#[cfg_attr(test, derive(Debug))] +pub(crate) struct SourceDirectiveArguments { + /// The friendly name of this source for use in `@connect` directives + pub(crate) name: String, + + /// Common HTTP options + pub(crate) http: SourceHTTPArguments, +} + +/// Common HTTP options for a connector [SourceSpecDefinition] +#[cfg_attr(test, derive(Debug))] +pub(crate) struct SourceHTTPArguments { + /// The base URL containing all sub API endpoints + pub(crate) base_url: Url, + + /// HTTP headers used when requesting resources from the upstream source. + /// Can be overridden by name with headers in a @connect directive. 
+ pub(crate) headers: IndexMap, +} + +/// Arguments to the `@connect` directive +/// +/// Refer to [ConnectSpecDefinition] for more info. +#[cfg_attr(test, derive(Debug))] +pub(crate) struct ConnectDirectiveArguments { + pub(crate) position: ObjectOrInterfaceFieldDirectivePosition, + + /// The upstream source for shared connector configuration. + /// + /// Must match the `name` argument of a @source directive in this schema. + pub(crate) source: Option, + + /// HTTP options for this connector + /// + /// Marked as optional in the GraphQL schema to allow for future transports, + /// but is currently required. + pub(crate) http: Option, + + /// Fields to extract from the upstream JSON response. + /// + /// Uses the JSONSelection syntax to define a mapping of connector response to + /// GraphQL schema. + pub(crate) selection: JSONSelection, + + /// Entity resolver marker + /// + /// Marks this connector as a canonical resolver for an entity (uniquely + /// identified domain model.) If true, the connector must be defined on a field + /// of the Query type. + pub(crate) entity: bool, +} + +/// The HTTP arguments needed for a connect request +#[cfg_attr(test, derive(Debug))] +pub(crate) struct ConnectHTTPArguments { + pub(crate) get: Option, + pub(crate) post: Option, + pub(crate) patch: Option, + pub(crate) put: Option, + pub(crate) delete: Option, + + /// Request body + /// + /// Define a request body using JSONSelection. Selections can include values from + /// field arguments using `$args.argName` and from fields on the parent type using + /// `$this.fieldName`. + pub(crate) body: Option, + + /// Configuration for headers to attach to the request. + /// + /// Overrides headers from the associated @source by name. 
+ pub(crate) headers: IndexMap, +} diff --git a/apollo-federation/src/sources/connect/spec/type_and_directive_specifications.rs b/apollo-federation/src/sources/connect/spec/type_and_directive_specifications.rs new file mode 100644 index 0000000000..430568e2a6 --- /dev/null +++ b/apollo-federation/src/sources/connect/spec/type_and_directive_specifications.rs @@ -0,0 +1,428 @@ +use apollo_compiler::ast::DirectiveLocation; +use apollo_compiler::ast::InputValueDefinition; +use apollo_compiler::ast::Type; +use apollo_compiler::ast::Value; +use apollo_compiler::collections::IndexMap; +use apollo_compiler::name; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::InputObjectType; +use apollo_compiler::ty; + +use super::schema::CONNECT_DIRECTIVE_NAME_IN_SPEC; +use super::schema::CONNECT_ENTITY_ARGUMENT_NAME; +use super::schema::CONNECT_HTTP_NAME_IN_SPEC; +use super::schema::CONNECT_SELECTION_ARGUMENT_NAME; +use super::schema::CONNECT_SOURCE_ARGUMENT_NAME; +use super::schema::HEADERS_ARGUMENT_NAME; +use super::schema::HTTP_ARGUMENT_NAME; +use super::schema::HTTP_HEADER_MAPPING_NAME_IN_SPEC; +use super::schema::JSON_SELECTION_SCALAR_NAME; +use super::schema::SOURCE_DIRECTIVE_NAME_IN_SPEC; +use super::schema::SOURCE_HTTP_NAME_IN_SPEC; +use super::schema::SOURCE_NAME_ARGUMENT_NAME; +use super::schema::URL_PATH_TEMPLATE_SCALAR_NAME; +use crate::error::FederationError; +use crate::error::SingleFederationError; +use crate::link::Link; +use crate::schema::position::InputObjectTypeDefinitionPosition; +use crate::schema::type_and_directive_specification::ArgumentSpecification; +use crate::schema::type_and_directive_specification::DirectiveArgumentSpecification; +use crate::schema::type_and_directive_specification::DirectiveSpecification; +use crate::schema::type_and_directive_specification::ScalarTypeSpecification; +use crate::schema::type_and_directive_specification::TypeAndDirectiveSpecification; +use crate::schema::FederationSchema; +use 
crate::sources::connect::spec::schema::CONNECT_BODY_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::SOURCE_BASE_URL_ARGUMENT_NAME; +use crate::sources::connect::spec::ConnectSpec; + +pub(super) fn check_or_add( + link: &Link, + schema: &mut FederationSchema, +) -> Result<(), FederationError> { + // the `get_type` closure expects a SingleFederationError, so we can't + // use FederationError::internal() + macro_rules! internal { + ($s:expr) => { + SingleFederationError::Internal { + message: $s.to_string(), + } + }; + } + + // scalar JSONSelection + let json_selection_spec = ScalarTypeSpecification { + name: link.type_name_in_schema(&JSON_SELECTION_SCALAR_NAME), + }; + + // scalar URLTemplate + let url_path_template_spec = ScalarTypeSpecification { + name: link.type_name_in_schema(&URL_PATH_TEMPLATE_SCALAR_NAME), + }; + + // ------------------------------------------------------------------------- + let http_header_mapping_field_list = vec![ + InputValueDefinition { + description: None, + name: HTTP_HEADER_MAPPING_NAME_ARGUMENT_NAME.clone(), + ty: ty!(String!).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: HTTP_HEADER_MAPPING_FROM_ARGUMENT_NAME.clone(), + ty: ty!(String).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: HTTP_HEADER_MAPPING_VALUE_ARGUMENT_NAME.clone(), + ty: ty!([String!]).into(), + default_value: None, + directives: Default::default(), + }, + ]; + + let mut http_header_mapping_fields = IndexMap::with_hasher(Default::default()); + for field in http_header_mapping_field_list { + http_header_mapping_fields.insert(field.name.clone(), 
Component::new(field)); + } + + // input HTTPHeaderMapping { + // name: String! + // as: String + // value: [String!] + // } + let http_header_mapping = InputObjectType { + description: None, + name: link.type_name_in_schema(&HTTP_HEADER_MAPPING_NAME_IN_SPEC), + directives: Default::default(), + fields: http_header_mapping_fields, + }; + + let http_header_mapping_pos = InputObjectTypeDefinitionPosition { + type_name: http_header_mapping.name.clone(), + }; + + // ------------------------------------------------------------------------- + + let connect_http_field_list = vec![ + InputValueDefinition { + description: None, + name: name!(GET), + ty: Type::Named(url_path_template_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: name!(POST), + ty: Type::Named(url_path_template_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: name!(PUT), + ty: Type::Named(url_path_template_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: name!(PATCH), + ty: Type::Named(url_path_template_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: name!(DELETE), + ty: Type::Named(url_path_template_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: CONNECT_BODY_ARGUMENT_NAME.clone(), + ty: Type::Named(json_selection_spec.name.clone()).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: HEADERS_ARGUMENT_NAME.clone(), + ty: Type::List(Box::new(Type::NonNullNamed( + http_header_mapping.name.clone(), + ))) + .into(), + default_value: None, + directives: Default::default(), + }, 
+ ]; + + let mut connect_http_fields = IndexMap::with_hasher(Default::default()); + for field in connect_http_field_list { + connect_http_fields.insert(field.name.clone(), Component::new(field)); + } + + let connect_http = InputObjectType { + name: link.type_name_in_schema(&CONNECT_HTTP_NAME_IN_SPEC), + description: None, + directives: Default::default(), + fields: connect_http_fields, + }; + + let connect_http_pos = InputObjectTypeDefinitionPosition { + type_name: connect_http.name.clone(), + }; + + // ------------------------------------------------------------------------- + + // directive @connect( + // source: String + // http: ConnectHTTP + // selection: JSONSelection! + // entity: Boolean = false + // ) repeatable on FIELD_DEFINITION + let connect_spec = DirectiveSpecification::new( + link.directive_name_in_schema(&CONNECT_DIRECTIVE_NAME_IN_SPEC), + &[ + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: CONNECT_SOURCE_ARGUMENT_NAME.clone(), + get_type: |_| Ok(ty!(String)), + default_value: None, + }, + composition_strategy: None, + }, + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: HTTP_ARGUMENT_NAME.clone(), + get_type: |s| { + let name = s + .metadata() + .ok_or_else(|| internal!("missing metadata"))? + .for_identity(&ConnectSpec::identity()) + .ok_or_else(|| internal!("missing connect spec"))? + .type_name_in_schema(&CONNECT_HTTP_NAME_IN_SPEC); + Ok(Type::Named(name)) + }, + default_value: None, + }, + composition_strategy: None, + }, + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: CONNECT_SELECTION_ARGUMENT_NAME.clone(), + get_type: |s| { + let name = s + .metadata() + .ok_or_else(|| internal!("missing metadata"))? + .for_identity(&ConnectSpec::identity()) + .ok_or_else(|| internal!("missing connect spec"))? 
+ .type_name_in_schema(&JSON_SELECTION_SCALAR_NAME); + Ok(Type::NonNullNamed(name)) + }, + default_value: None, + }, + composition_strategy: None, + }, + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: CONNECT_ENTITY_ARGUMENT_NAME.clone(), + get_type: |_| Ok(Type::Named(name!(Boolean))), + default_value: Some(Value::Boolean(false)), + }, + composition_strategy: None, + }, + ], + true, + &[DirectiveLocation::FieldDefinition], + false, + None, + ); + + // ------------------------------------------------------------------------- + + let source_http_field_list = vec![ + InputValueDefinition { + description: None, + name: SOURCE_BASE_URL_ARGUMENT_NAME.clone(), + ty: ty!(String!).into(), + default_value: None, + directives: Default::default(), + }, + InputValueDefinition { + description: None, + name: HEADERS_ARGUMENT_NAME.clone(), + ty: Type::List(Box::new(Type::NonNullNamed( + http_header_mapping.name.clone(), + ))) + .into(), + default_value: None, + directives: Default::default(), + }, + ]; + + let mut source_http_fields = IndexMap::with_hasher(Default::default()); + for field in source_http_field_list { + source_http_fields.insert(field.name.clone(), Component::new(field)); + } + + // input SourceHTTP { + // baseURL: String! + // headers: [HTTPHeaderMapping!] + // } + let source_http_spec = InputObjectType { + name: link.type_name_in_schema(&SOURCE_HTTP_NAME_IN_SPEC), + description: None, + directives: Default::default(), + fields: source_http_fields, + }; + + let source_http_pos = InputObjectTypeDefinitionPosition { + type_name: source_http_spec.name.clone(), + }; + + // ------------------------------------------------------------------------- + + // directive @source( + // name: String! 
+ // http: SourceHTTP + // ) repeatable on SCHEMA + let source_spec = DirectiveSpecification::new( + link.directive_name_in_schema(&SOURCE_DIRECTIVE_NAME_IN_SPEC), + &[ + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: SOURCE_NAME_ARGUMENT_NAME.clone(), + get_type: |_| Ok(ty!(String!)), + default_value: None, + }, + composition_strategy: None, + }, + DirectiveArgumentSpecification { + base_spec: ArgumentSpecification { + name: HTTP_ARGUMENT_NAME.clone(), + get_type: |s| { + let name = s + .metadata() + .ok_or_else(|| internal!("missing metadata"))? + .for_identity(&ConnectSpec::identity()) + .ok_or_else(|| internal!("missing connect spec"))? + .type_name_in_schema(&SOURCE_HTTP_NAME_IN_SPEC); + Ok(Type::Named(name)) + }, + default_value: None, + }, + composition_strategy: None, + }, + ], + true, + &[DirectiveLocation::Schema], + false, + None, + ); + + json_selection_spec.check_or_add(schema)?; + url_path_template_spec.check_or_add(schema)?; + http_header_mapping_pos.pre_insert(schema)?; + http_header_mapping_pos.insert(schema, http_header_mapping.into())?; + + connect_http_pos.pre_insert(schema)?; + connect_http_pos.insert(schema, connect_http.into())?; + connect_spec.check_or_add(schema)?; + + source_http_pos.pre_insert(schema)?; + source_http_pos.insert(schema, source_http_spec.into())?; + source_spec.check_or_add(schema)?; + + Ok(()) +} + +#[cfg(test)] +mod tests { + use apollo_compiler::Schema; + use insta::assert_snapshot; + + use super::check_or_add; + use crate::schema::FederationSchema; + use crate::sources::connect::spec::ConnectSpec; + + #[test] + fn test() { + let schema = Schema::parse(r#" + type Query { hello: String } + extend schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@source"]) + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + enum link__Purpose { SECURITY EXECUTION } + scalar 
link__Import + "#, "schema.graphql").unwrap(); + + let mut federation_schema = FederationSchema::new(schema).unwrap(); + let link = federation_schema + .metadata() + .unwrap() + .for_identity(&ConnectSpec::identity()) + .unwrap(); + + check_or_add(&link, &mut federation_schema).unwrap(); + + assert_snapshot!(federation_schema.schema().serialize().to_string(), @r###" + schema { + query: Query + } + + extend schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@source"]) + + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + + directive @connect(source: String, http: connect__ConnectHTTP, selection: connect__JSONSelection!, entity: Boolean = false) repeatable on FIELD_DEFINITION + + directive @source(name: String!, http: connect__SourceHTTP) repeatable on SCHEMA + + type Query { + hello: String + } + + enum link__Purpose { + SECURITY + EXECUTION + } + + scalar link__Import + + scalar connect__JSONSelection + + scalar connect__URLTemplate + + input connect__HTTPHeaderMapping { + name: String! + from: String + value: [String!] + } + + input connect__ConnectHTTP { + GET: connect__URLTemplate + POST: connect__URLTemplate + PUT: connect__URLTemplate + PATCH: connect__URLTemplate + DELETE: connect__URLTemplate + body: connect__JSONSelection + headers: [connect__HTTPHeaderMapping!] + } + + input connect__SourceHTTP { + baseURL: String! + headers: [connect__HTTPHeaderMapping!] + } + "###); + } +} diff --git a/apollo-federation/src/sources/connect/string_template.rs b/apollo-federation/src/sources/connect/string_template.rs new file mode 100644 index 0000000000..8142d37888 --- /dev/null +++ b/apollo-federation/src/sources/connect/string_template.rs @@ -0,0 +1,395 @@ +//! A [`StringTemplate`] is a string containing one or more [`Expression`]s. +//! These are used in connector URIs and headers. +//! +//! 
Parsing (this module) is done by both the router at startup and composition. Validation
+//! (in [`crate::sources::connect::validation`]) is done only by composition.
+
+use std::fmt::Display;
+use std::ops::Range;
+use std::str::FromStr;
+
+use apollo_compiler::collections::IndexMap;
+use itertools::Itertools;
+use serde_json_bytes::Value;
+
+use crate::sources::connect::JSONSelection;
+
+/// A parsed string template, containing a series of [`Part`]s.
+///
+/// The `Const` generic allows consumers to validate constant pieces of the string with a type of
+/// their choice. This is specifically just [`http::HeaderValue`] for headers right now.
+#[derive(Clone, Debug)]
+pub struct StringTemplate<Const = String> {
+    pub(crate) parts: Vec<Part<Const>>,
+}
+impl<Const: FromStr> StringTemplate<Const> {
+    /// Parse a [`StringTemplate`]. If this template is nested within another string, provide an
+    /// `offset` to correct the locations.
+    ///
+    /// TODO: Remove the `offset` param once `URLTemplate` can leverage this more directly.
+    pub(crate) fn parse(input: &str, mut offset: usize) -> Result<Self, Error> {
+        let mut chars = input.chars().peekable();
+        let mut parts = Vec::new();
+        while let Some(next) = chars.peek() {
+            if *next == '{' {
+                let mut braces_count = 0; // Ignore braces within JSONSelection
+                let expression = chars
+                    .by_ref()
+                    .skip(1)
+                    .take_while(|c| {
+                        if *c == '{' {
+                            braces_count += 1;
+                        } else if *c == '}' {
+                            braces_count -= 1;
+                        }
+                        braces_count >= 0
+                    })
+                    .collect::<String>();
+                if braces_count >= 0 {
+                    return Err(Error {
+                        message: "Invalid expression, missing closing }".into(),
+                        location: offset..input.len(),
+                    });
+                }
+                offset += 1; // Account for opening brace
+                let parsed = JSONSelection::parse(&expression).map_err(|err| {
+                    let start_of_parse_error = offset + err.offset;
+                    Error {
+                        message: err.message,
+                        location: start_of_parse_error..(offset + expression.len()),
+                    }
+                })?;
+                parts.push(Part::Expression(Expression {
+                    expression: parsed,
+                    location: offset..(offset + expression.len()),
+                }));
+                offset +=
expression.len() + 1; // Account for closing brace
+            } else {
+                let constant = chars
+                    .by_ref()
+                    .peeking_take_while(|c| *c != '{')
+                    .collect::<String>();
+                let value = Const::from_str(&constant).map_err(|_unhelpful_err| Error {
+                    message: format!("invalid value `{constant}`"),
+                    location: offset..offset + constant.len(),
+                })?;
+                parts.push(Part::Constant(Constant {
+                    value,
+                    location: offset..offset + constant.len(),
+                }));
+                offset += constant.len();
+            }
+        }
+        Ok(Self { parts })
+    }
+
+    /// Get all the dynamic [`Expression`] pieces of the template for validation. If interpolating
+    /// the entire template, use [`Self::interpolate`] instead.
+    pub(crate) fn expressions(&self) -> impl Iterator<Item = &Expression> {
+        self.parts.iter().filter_map(|part| {
+            if let Part::Expression(expression) = part {
+                Some(expression)
+            } else {
+                None
+            }
+        })
+    }
+}
+
+impl StringTemplate {
+    /// Interpolation for when the constant type is a string. This can't be implemented for
+    /// arbitrary generic types, so non-string consumers (headers) implement this themselves with
+    /// any additional validations/transformations they need.
+    pub(crate) fn interpolate(&self, vars: &IndexMap<String, Value>) -> Result<String, Error> {
+        self.parts
+            .iter()
+            .map(|part| part.interpolate(vars))
+            .collect()
+    }
+}
+
+/// Expressions should be written the same as they were originally, even though we don't keep the
+/// original source around. So constants are written as-is and expressions are surrounded with `{ }`.
+impl<Const: Display> Display for StringTemplate<Const> {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        for part in &self.parts {
+            match part {
+                Part::Constant(Constant { value, .. }) => write!(f, "{}", value)?,
+                Part::Expression(Expression { expression, .. }) => write!(f, "{{{}}}", expression)?,
+            }
+        }
+        Ok(())
+    }
+}
+
+/// A general-purpose error type which includes both a description of the problem and the offset span
+/// within the original expression where the problem occurred.
Used for both parsing and interpolation.
+#[derive(Debug, PartialEq)]
+pub struct Error {
+    /// A human-readable description of the issue.
+    pub message: String,
+    /// The string offsets to the original [`StringTemplate`] (not just the part) where the issue
+    /// occurred. As per usual, the end of the range is exclusive.
+    pub(crate) location: Range<usize>,
+}
+
+impl Display for Error {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.message)
+    }
+}
+
+impl std::error::Error for Error {}
+
+/// One piece of a [`StringTemplate`]
+#[derive(Clone, Debug)]
+pub(crate) enum Part<Const> {
+    /// A constant string literal—the piece of a [`StringTemplate`] _not_ in `{ }`
+    Constant(Constant<Const>),
+    /// A dynamic piece of a [`StringTemplate`], which came from inside `{ }` originally.
+    Expression(Expression),
+}
+
+impl<Const> Part<Const> {
+    /// Get the original location of the part from the string which was parsed to form the
+    /// [`StringTemplate`].
+    fn location(&self) -> Range<usize> {
+        match self {
+            Self::Constant(c) => c.location.clone(),
+            Self::Expression(e) => e.location.clone(),
+        }
+    }
+}
+
+/// These generics are a bit of a mess, but what they're saying is given a generic `Const` type,
+/// which again is `String` for the main use case but specialized occasionally (like [`http::HeaderValue`] for headers),
+/// we can interpolate the value of the part into that type.
+///
+/// For [`Constant`]s this is easy, just clone the value (thus `Const: Clone`).
+///
+/// For [`Expression`]s, we first need to interpolate the expression as normal (with [`ApplyTo`]),
+/// and then convert the resulting [`Value`] into the `Const` type. For that we require both
+/// `Const: FromStr` and `Const: TryFrom<String>` so we don't have to clone all `&str` into `String`s,
+/// nor borrow `String` just for them to be re-allocated. The `FromStrErr` and `TryFromStringError`
+/// are then required to capture the error types of those two conversion methods.
+///
+/// So for `Const = String` these are actually all no-ops with infallible conversions, but we allow
+/// for [`http::HeaderValue`] to fail.
+impl<Const, FromStrErr, TryFromStringError> Part<Const>
+where
+    Const: Clone,
+    Const: FromStr<Err = FromStrErr>,
+    FromStrErr: std::error::Error,
+    Const: TryFrom<String, Error = TryFromStringError>,
+    TryFromStringError: std::error::Error,
+{
+    pub(crate) fn interpolate(&self, vars: &IndexMap<String, Value>) -> Result<Const, Error> {
+        match self {
+            Part::Constant(Constant { value, .. }) => Ok(value.clone()),
+            Part::Expression(Expression { expression, .. }) => {
+                // TODO: do something with the ApplyTo errors
+                let (value, _errs) = expression.apply_with_vars(&Value::Null, vars);
+
+                match value.unwrap_or(Value::Null) {
+                    Value::Null => Const::from_str("").map_err(|err| Error {
+                        message: err.to_string(),
+                        location: self.location(),
+                    }),
+                    Value::Bool(b) => Const::try_from(b.to_string()).map_err(|err| Error {
+                        message: err.to_string(),
+                        location: self.location(),
+                    }),
+                    Value::Number(n) => Const::try_from(n.to_string()).map_err(|err| Error {
+                        message: err.to_string(),
+                        location: self.location(),
+                    }),
+                    Value::String(s) => Const::from_str(s.as_str()).map_err(|err| Error {
+                        message: err.to_string(),
+                        location: self.location(),
+                    }),
+                    Value::Array(_) | Value::Object(_) => Err(Error {
+                        message: "Expressions can't evaluate to arrays or objects.".to_string(),
+                        location: self.location(),
+                    }),
+                }
+            }
+        }
+    }
+}
+
+/// A constant string literal—the piece of a [`StringTemplate`] _not_ in `{ }`
+#[derive(Clone, Debug)]
+pub(crate) struct Constant<T> {
+    value: T,
+    location: Range<usize>,
+}
+
+/// A dynamic piece of a [`StringTemplate`], which came from inside `{ }` originally.
+#[derive(Clone, Debug)]
+pub(crate) struct Expression {
+    pub(crate) expression: JSONSelection,
+    pub(crate) location: Range<usize>,
+}
+
+#[cfg(test)]
+mod test_parse {
+    use insta::assert_debug_snapshot;
+
+    use super::*;
+
+    #[test]
+    fn simple_constant() {
+        let template =
+            StringTemplate::<String>::parse("text", 0).expect("simple template should be valid");
+        assert_debug_snapshot!(template);
+    }
+
+    #[test]
+    fn simple_expression() {
+        assert_debug_snapshot!(StringTemplate::<String>::parse("{$config.one}", 0).unwrap());
+    }
+    #[test]
+    fn mixed_constant_and_expression() {
+        assert_debug_snapshot!(StringTemplate::<String>::parse("text{$config.one}text", 0).unwrap());
+    }
+
+    #[test]
+    fn offset() {
+        assert_debug_snapshot!(StringTemplate::<String>::parse("text{$config.one}text", 9).unwrap());
+    }
+
+    #[test]
+    fn expressions_with_nested_braces() {
+        assert_debug_snapshot!(StringTemplate::<String>::parse(
+            "const{$config.one { two { three } }}another-const",
+            0
+        )
+        .unwrap());
+    }
+
+    #[test]
+    fn missing_closing_braces() {
+        assert_debug_snapshot!(
+            StringTemplate::<String>::parse("{$config.one", 0),
+            @r###"
+        Err(
+            Error {
+                message: "Invalid expression, missing closing }",
+                location: 0..12,
+            },
+        )
+        "###
+        )
+    }
+}
+
+#[cfg(test)]
+mod test_interpolate {
+    use insta::assert_debug_snapshot;
+    use pretty_assertions::assert_eq;
+    use serde_json_bytes::json;
+
+    use super::*;
+    #[test]
+    fn test_interpolate() {
+        let template = StringTemplate::<String>::parse("before {$config.one} after", 0).unwrap();
+        let mut vars = IndexMap::default();
+        vars.insert("$config".to_string(), json!({"one": "foo"}));
+        assert_eq!(template.interpolate(&vars).unwrap(), "before foo after");
+    }
+
+    #[test]
+    fn test_interpolate_missing_value() {
+        let template = StringTemplate::<String>::parse("{$config.one}", 0).unwrap();
+        let vars = IndexMap::default();
+        assert_eq!(template.interpolate(&vars).unwrap(), "");
+    }
+
+    #[test]
+    fn test_interpolate_value_array() {
+        let template = StringTemplate::<String>::parse("{$config.one}", 0).unwrap();
+        let
mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": ["one", "two"]})); + assert_debug_snapshot!( + template.interpolate(&vars), + @r###" + Err( + Error { + message: "Expressions can't evaluate to arrays or objects.", + location: 1..12, + }, + ) + "### + ); + } + + #[test] + fn test_interpolate_value_bool() { + let template = StringTemplate::::parse("{$config.one}", 0).unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": true})); + assert_eq!(template.interpolate(&vars).unwrap(), "true"); + } + + #[test] + fn test_interpolate_value_null() { + let template = StringTemplate::::parse("{$config.one}", 0).unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": null})); + assert_eq!(template.interpolate(&vars).unwrap(), ""); + } + + #[test] + fn test_interpolate_value_number() { + let template = StringTemplate::::parse("{$config.one}", 0).unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": 1})); + assert_eq!(template.interpolate(&vars).unwrap(), "1"); + } + + #[test] + fn test_interpolate_value_object() { + let template = StringTemplate::::parse("{$config.one}", 0).unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": {}})); + assert_debug_snapshot!( + template.interpolate(&vars), + @r###" + Err( + Error { + message: "Expressions can't evaluate to arrays or objects.", + location: 1..12, + }, + ) + "### + ); + } + + #[test] + fn test_interpolate_value_string() { + let template = StringTemplate::::parse("{$config.one}", 0).unwrap(); + let mut vars = IndexMap::default(); + vars.insert("$config".to_string(), json!({"one": "string"})); + assert_eq!(template.interpolate(&vars).unwrap(), "string"); + } +} + +#[cfg(test)] +mod test_get_expressions { + use super::*; + + #[test] + fn test_variable_references() { + let value = + StringTemplate::::parse("a {$this.a.b.c} b 
{$args.a.b.c} c {$config.a.b.c}", 0) + .unwrap(); + let references: Vec<_> = value + .expressions() + .map(|e| e.expression.to_string()) + .collect(); + assert_eq!( + references, + vec!["$this.a.b.c", "$args.a.b.c", "$config.a.b.c"] + ); + } +} diff --git a/apollo-federation/src/sources/connect/tests/schemas/simple.graphql b/apollo-federation/src/sources/connect/tests/schemas/simple.graphql new file mode 100644 index 0000000000..c17ccb5b15 --- /dev/null +++ b/apollo-federation/src/sources/connect/tests/schemas/simple.graphql @@ -0,0 +1,125 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.4", for: EXECUTION) + @join__directive( + graphs: [CONNECTORS] + name: "link" + args: { + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + } + ) + @join__directive( + graphs: [CONNECTORS] + name: "source" + args: { + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + headers: [ + { name: "AuthToken", from: "X-Auth-Token" } + { name: "user-agent", value: "Firefox" } + ] + } + } + ) { + query: Query +} + +directive @join__directive( + graphs: [join__Graph!] + name: String! + args: join__DirectiveArguments +) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field( + graph: join__Graph + requires: join__FieldSet + provides: join__FieldSet + type: String + external: Boolean + override: String + usedOverridden: Boolean + overrideLabel: String +) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements( + graph: join__Graph! + interface: String! +) repeatable on OBJECT | INTERFACE + +directive @join__type( + graph: join__Graph! + key: join__FieldSet + extension: Boolean! = false + resolvable: Boolean! = true + isInterfaceObject: Boolean! 
= false +) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember( + graph: join__Graph! + member: String! +) repeatable on UNION + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +scalar join__DirectiveArguments + +scalar join__FieldSet + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "http://unused") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Post @join__type(graph: CONNECTORS) { + id: ID! + title: String + body: String +} + +type Query @join__type(graph: CONNECTORS) { + users: [User] + @join__directive( + graphs: [CONNECTORS] + name: "connect" + args: { source: "json", http: { GET: "/users" }, selection: "id name" } + ) + posts: [Post] + @join__directive( + graphs: [CONNECTORS] + name: "connect" + args: { + source: "json" + http: { GET: "/posts" } + selection: "id title body" + } + ) +} + +type User @join__type(graph: CONNECTORS, key: "id", resolvable: false) { + id: ID! 
+ name: String +} diff --git a/apollo-federation/src/sources/connect/tests/schemas/simple.yaml b/apollo-federation/src/sources/connect/tests/schemas/simple.yaml new file mode 100644 index 0000000000..577cc07b3e --- /dev/null +++ b/apollo-federation/src/sources/connect/tests/schemas/simple.yaml @@ -0,0 +1,47 @@ +# rover supergraph compose --config src/sources/connect/tests/schemas/simple.yaml > src/sources/connect/tests/schemas/simple.graphql +federation_version: =2.7.3-testing.0 +subgraphs: + connectors: + routing_url: http://unused + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.7", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source( + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + headers: [ + { name: "AuthToken", from: "X-Auth-Token" } + { name: "user-agent", value: "Firefox" } + ] + } + ) + + type Query { + users: [User] + @connect( + source: "json" + http: { GET: "/users" } + selection: "id name" + ) + + posts: [Post] + @connect( + source: "json" + http: { GET: "/posts" } + selection: "id title body" + ) + } + + type User @key(fields: "id", resolvable: false) { + id: ID! + name: String + } + + type Post { + id: ID! 
+ title: String + body: String + } \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/url_path_template.rs b/apollo-federation/src/sources/connect/url_path_template.rs deleted file mode 100644 index 83977d3d45..0000000000 --- a/apollo-federation/src/sources/connect/url_path_template.rs +++ /dev/null @@ -1,1808 +0,0 @@ -use std::fmt::Display; - -use apollo_compiler::collections::IndexMap; -use itertools::Itertools; -use nom::branch::alt; -use nom::bytes::complete::tag; -use nom::character::complete::char; -use nom::character::complete::one_of; -use nom::combinator::opt; -use nom::combinator::recognize; -use nom::multi::many0; -use nom::sequence::pair; -use nom::sequence::preceded; -use nom::sequence::tuple; -use nom::IResult; -use serde::Serialize; -use serde_json_bytes::ByteString; -use serde_json_bytes::Map; -use serde_json_bytes::Value as JSON; - -/// A parser accepting URLPathTemplate syntax, which is useful both for -/// generating new URL paths from provided variables and for extracting variable -/// values from concrete URL paths. - -#[derive(Debug, PartialEq, Clone, Default)] -pub struct URLPathTemplate { - path: Vec, - query: IndexMap, -} - -#[derive(Debug, PartialEq, Clone)] -pub struct ParameterValue { - // The ParameterValue struct represents both path parameter values and query - // parameter values, allowing zero or more variable expressions separated by - // nonempty constant text. - parts: Vec, -} - -#[derive(Debug, PartialEq, Clone)] -pub enum ValuePart { - Text(String), - Var(VariableExpression), -} - -#[derive(Debug, PartialEq, Clone, Default)] -pub struct VariableExpression { - // Variable paths are often a single identifier, but may also consist of a - // sequence of identifiers joined with the . character. 
We represent dotted - // paths as a single string, rather than a Vec, and these dotted - // path strings are expected for the input keys of generate_path and the - // output keys of extract_vars, rather than a nested JSON object. - var_path: String, - - // When Some, the batch_separator option indicates the variable is a batch - // variable, so the value of the variable is expected to be a JSON array, - // and the separator string separates the batched variable values in the - // parsed/generated URL path. - batch_separator: Option, - - // Variables in the URL path are required by default, whereas variables in - // the query parameter list are optional by default, but can be made - // mandatory by adding a trailing ! to the variable path. - required: bool, -} - -impl URLPathTemplate { - // Top-level parsing entry point for URLPathTemplate syntax. - pub fn parse(input: &str) -> Result { - let mut prefix_suffix = input.splitn(2, '?'); - let path_prefix = prefix_suffix.next(); - let query_suffix = prefix_suffix.next(); - let mut path = vec![]; - - if let Some(path_prefix) = path_prefix { - for path_part in path_prefix.split('/') { - if !path_part.is_empty() { - path.push(ParameterValue::parse(path_part, true)?); - } - } - } - - let mut query = IndexMap::default(); - - if let Some(query_suffix) = query_suffix { - for query_part in query_suffix.split('&') { - if let Some((key, value)) = query_part.split_once('=') { - query.insert(key.to_string(), ParameterValue::parse(value, false)?); - } - } - } - - Ok(URLPathTemplate { path, query }) - } - - // Given a URLPathTemplate and an IndexMap of variables to be interpolated - // into its {...} expressions, generate a new URL path String. - // Guaranteed to return a "/"-prefixed string to make appending to the - // base url easier. 
- pub fn generate_path(&self, vars: &JSON) -> Result { - let mut path = String::new(); - if let Some(var_map) = vars.as_object() { - for (path_position, param_value) in self.path.iter().enumerate() { - path.push('/'); - - if let Some(value) = param_value.interpolate(var_map)? { - path.push_str(value.as_str()); - } else { - return Err(format!( - "Incomplete path parameter {} at position {} with variables {}", - param_value, - path_position, - JSON::Object(var_map.clone()), - )); - } - } - - let mut params = vec![]; - for (key, param_value) in &self.query { - if let Some(value) = param_value.interpolate(var_map)? { - params.push(format!("{}={}", key, value)); - } - } - if !params.is_empty() { - path.push('?'); - path.push_str(¶ms.join("&")); - } - } else { - return Err(format!("Expected object, got {}", vars)); - } - - if path.is_empty() { - Ok("/".to_string()) - } else if path.starts_with('/') { - Ok(path) - } else { - Ok(format!("/{}", path)) - } - } - - // Given a URLPathTemplate and a concrete URL path, extract any named/nested - // variables from the path and return them as a JSON object. - #[allow(dead_code)] - fn extract_vars(&self, path: &str) -> Result { - let concrete_template = URLPathTemplate::parse(path)?; - - if concrete_template.path.len() != self.path.len() { - return Err(format!( - "Path length {} does not match concrete path length {}", - self.path.len(), - concrete_template.path.len() - )); - } - - let mut var_map = Map::new(); - - for (i, path_value) in self.path.iter().enumerate() { - for (var_path, value) in path_value.extract_vars(&concrete_template.path[i])? { - var_map.insert(var_path, value); - } - } - - // For each query parameter, extract the corresponding variable(s) from - // the concrete template text. - for (key, query_value) in self.query.iter() { - if let Some(concrete_value) = concrete_template.query.get(key) { - for (var_path, value) in query_value.extract_vars(concrete_value)? 
{ - var_map.insert(var_path, value); - } - } else { - // If there is no corresponding query parameter in the concrete - // URL path, we can't extract variables, which is only a problem - // if any of the expected variables are required. - for part in &query_value.parts { - if let ValuePart::Var(var) = part { - if var.required { - return Err(format!( - "Missing required query parameter {}={}", - key, query_value - )); - } - } - } - } - } - - Ok(JSON::Object(var_map)) - } - - pub fn required_parameters(&self) -> Vec { - let mut parameters = IndexSet::default(); - for param_value in &self.path { - parameters.extend(param_value.required_parameters()); - } - for param_value in self.query.values() { - parameters.extend(param_value.required_parameters()); - } - // sorted for a stable SDL - parameters.into_iter().sorted().collect() - } -} - -impl Display for URLPathTemplate { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - for param_value in &self.path { - f.write_str("/")?; - param_value.fmt(f)?; - } - - if !self.query.is_empty() { - f.write_str("?")?; - let mut first = true; - for (key, param_value) in &self.query { - if first { - first = false; - } else { - f.write_str("&")?; - } - f.write_str(key)?; - f.write_str("=")?; - param_value.fmt(f)?; - } - } - - Ok(()) - } -} - -impl Serialize for URLPathTemplate { - fn serialize(&self, serializer: S) -> Result { - serializer.collect_str(self) - } -} - -impl ParameterValue { - fn parse(input: &str, required_by_default: bool) -> Result { - // Split the text around any {...} variable expressions, which must be - // separated by nonempty text. 
- let mut parts = vec![]; - let mut remaining = input; - - while let Some((prefix, suffix)) = remaining.split_once('{') { - if !prefix.is_empty() { - parts.push(ValuePart::Text(prefix.to_string())); - } - remaining = suffix; - - if let Some((var, suffix)) = remaining.split_once('}') { - parts.push(ValuePart::Var(VariableExpression::parse( - var, - required_by_default, - )?)); - remaining = suffix; - } else { - return Err(format!( - "Missing closing brace in URL suffix {} of path {}", - remaining, input - )); - } - } - - if !remaining.is_empty() { - parts.push(ValuePart::Text(remaining.to_string())); - } - - // Enforce that variable expressions must be separated by nonempty text - // delimiters, though the parameter value may start or end with variable - // expressions without preceding/following text. - let mut prev_part_was_var = false; - for part in &parts { - if let ValuePart::Var(_) = part { - if prev_part_was_var { - return Err(format!( - "Ambiguous adjacent variable expressions in {}", - input, - )); - } - prev_part_was_var = true; - } else { - prev_part_was_var = false; - } - } - - Ok(ParameterValue { parts }) - } - - fn interpolate(&self, vars: &Map) -> Result, String> { - let mut value = String::new(); - let mut missing_vars = vec![]; - let mut some_vars_required = false; - - for part in &self.parts { - match part { - ValuePart::Text(text) => { - value.push_str(text); - } - ValuePart::Var(var) => { - if let Some(var_value) = var.interpolate(vars)? { - value.push_str(&var_value); - } else { - missing_vars.push(var); - } - if var.required { - some_vars_required = true; - } - } - } - } - - // If any variable fails to interpolate, the whole ParameterValue fails - // to interpolate. This can be harmless if none of the variables are - // required, but if any of the variables are required (not just the - // variables that failed to interpolate), then the whole ParameterValue - // is required, so any missing variable becomes an error. 
- if let Some(missing) = missing_vars.into_iter().next() { - if some_vars_required { - return Err(format!( - "Missing variable {} for required parameter {} given variables {}", - missing.var_path, - self, - JSON::Object(vars.clone()), - )); - } else { - return Ok(None); - } - } - - Ok(Some(value)) - } - - fn extract_vars( - &self, - concrete_value: &ParameterValue, - ) -> Result, String> { - let mut concrete_text = String::new(); - for part in &concrete_value.parts { - concrete_text.push_str(match part { - ValuePart::Text(text) => text, - ValuePart::Var(var) => { - return Err(format!("Unexpected variable expression {{{}}}", var)); - } - }); - } - - let mut concrete_suffix = concrete_text.as_str(); - let mut pending_var: Option<&VariableExpression> = None; - let mut output = Map::new(); - - fn add_var_value( - var: &VariableExpression, - value: &str, - output: &mut Map, - ) { - let key = ByteString::from(var.var_path.as_str()); - if let Some(separator) = &var.batch_separator { - let mut values = vec![]; - for value in value.split(separator) { - if !value.is_empty() { - values.push(JSON::String(ByteString::from(value))); - } - } - output.insert(key, JSON::Array(values)); - } else if !value.is_empty() { - output.insert(key, JSON::String(ByteString::from(value))); - } - } - - for part in &self.parts { - match part { - ValuePart::Text(text) => { - if let Some(var) = pending_var { - if let Some(start) = concrete_suffix.find(text) { - add_var_value(var, &concrete_suffix[..start], &mut output); - concrete_suffix = &concrete_suffix[start..]; - } else { - add_var_value(var, concrete_suffix, &mut output); - concrete_suffix = ""; - } - pending_var = None; - } - - if concrete_suffix.starts_with(text) { - concrete_suffix = &concrete_suffix[text.len()..]; - } else { - return Err(format!( - "Constant text {} not found in {}", - text, concrete_text - )); - } - } - ValuePart::Var(var) => { - if let Some(pending) = pending_var { - return Err(format!( - "Ambiguous adjacent variable 
expressions {} and {} in parameter value {}", - pending, var, concrete_text - )); - } else { - // This variable's value will be extracted from the - // concrete URL by the ValuePart::Text branch above, on - // the next iteration of the for loop. - pending_var = Some(var); - } - } - } - } - - if let Some(var) = pending_var { - add_var_value(var, concrete_suffix, &mut output); - } - - Ok(output) - } - - fn required_parameters(&self) -> Vec { - let mut parameters = vec![]; - for part in &self.parts { - match part { - ValuePart::Text(_) => {} - ValuePart::Var(var) => { - if var.required { - parameters.push(var.var_path.clone()); - } - } - } - } - parameters - } -} - -impl Display for ParameterValue { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - for part in &self.parts { - part.fmt(f)?; - } - Ok(()) - } -} - -impl Serialize for ValuePart { - fn serialize(&self, serializer: S) -> Result { - serializer.collect_str(self) - } -} - -impl Display for ValuePart { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - ValuePart::Text(text) => { - f.write_str(text)?; - } - ValuePart::Var(var) => { - f.write_str("{")?; - var.fmt(f)?; - f.write_str("}")?; - } - } - Ok(()) - } -} - -impl Serialize for ParameterValue { - fn serialize(&self, serializer: S) -> Result { - serializer.collect_str(self) - } -} - -impl VariableExpression { - // TODO Figure out if this required parameter is really the best way to - // handle ! variables. 
- fn parse(input: &str, required: bool) -> Result { - tuple(( - nom_parse_identifier_path, - opt(char('!')), - opt(pair(one_of(",;|+ "), tag("..."))), - ))(input) - .map_err(|err| format!("Error parsing variable expression {}: {}", input, err)) - .and_then( - |(remaining, (var_path, exclamation_point, batch_separator))| { - if remaining.is_empty() { - Ok(VariableExpression { - var_path, - required: exclamation_point.is_some() || required, - batch_separator: batch_separator - .map(|(separator, _)| separator.to_string()), - }) - } else { - Err(format!( - "Unexpected trailing characters {} in variable expression {}", - remaining, input - )) - } - }, - ) - } - - fn interpolate(&self, vars: &Map) -> Result, String> { - let var_path_bytes = ByteString::from(self.var_path.as_str()); - if let Some(child_value) = vars.get(&var_path_bytes) { - if let Some(separator) = &self.batch_separator { - if let JSON::Array(array) = child_value { - let mut value_strings = vec![]; - for value in array { - value_strings.push(self.value_as_string(value)); - } - if value_strings.is_empty() { - return Ok(None); - } else { - return Ok(Some(value_strings.join(separator))); - } - } - // Fall through to handle non-array values as single batch inputs. - } - Ok(Some(self.value_as_string(child_value))) - } else if self.required { - return Err(format!( - "Missing required variable {} in {}", - self.var_path, - JSON::Object(vars.clone()), - )); - } else { - return Ok(None); - } - } - - fn value_as_string(&self, value: &JSON) -> String { - // Need to remove quotes from string values, since the quotes don't - // belong in the URL. 
- if let JSON::String(string) = value { - string.as_str().to_string() - } else { - value.to_string() - } - } -} - -impl Display for VariableExpression { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.write_str(&self.var_path)?; - if self.required { - f.write_str("!")?; - } - if let Some(separator) = &self.batch_separator { - f.write_str(separator)?; - f.write_str("...")?; - } - Ok(()) - } -} - -fn nom_parse_identifier_possible_namespace(input: &str) -> IResult<&str, &str> { - recognize(alt((tag("$this"), nom_parse_identifier)))(input) -} - -fn nom_parse_identifier(input: &str) -> IResult<&str, &str> { - recognize(pair( - one_of("abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_"), - many0(one_of( - "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ_0123456789", - )), - ))(input) -} - -fn nom_parse_identifier_path(input: &str) -> IResult<&str, String> { - let (input, first) = nom_parse_identifier_possible_namespace(input)?; - let (input, mut rest) = many0(preceded(char('.'), nom_parse_identifier))(input)?; - let mut identifier_path = vec![first]; - identifier_path.append(&mut rest); - Ok((input, identifier_path.join("."))) -} - -#[cfg(test)] -mod tests { - use serde_json_bytes::json; - - use super::*; - - #[test] - fn test_parse_identifier() { - assert_eq!(nom_parse_identifier("abc"), Ok(("", "abc"))); - assert_eq!(nom_parse_identifier("abc123"), Ok(("", "abc123"))); - assert_eq!(nom_parse_identifier("abc_123"), Ok(("", "abc_123"))); - assert_eq!(nom_parse_identifier("abc-123"), Ok(("-123", "abc"))); - } - - #[test] - fn test_parse_identifier_path() { - assert_eq!( - nom_parse_identifier_path("abc"), - Ok(("", "abc".to_string())), - ); - assert_eq!( - nom_parse_identifier_path("abc.def"), - Ok(("", "abc.def".to_string())), - ); - assert_eq!( - nom_parse_identifier_path("abc.def.ghi"), - Ok(("", "abc.def.ghi".to_string())), - ); - assert_eq!( - nom_parse_identifier_path("$this.def.ghi"), - Ok(("", "$this.def.ghi".to_string())), - ); 
- - assert!(nom_parse_identifier_path("$anything.def.ghi").is_err()); - assert_eq!( - nom_parse_identifier_path("abc.$this.ghi"), - Ok((".$this.ghi", "abc".to_string())), - ); - } - - #[test] - fn test_path_list() { - assert_eq!( - URLPathTemplate::parse("/abc"), - Ok(URLPathTemplate { - path: vec![ParameterValue { - parts: vec![ValuePart::Text("abc".to_string())], - },], - ..Default::default() - }), - ); - - assert_eq!( - URLPathTemplate::parse("/abc/def"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("abc".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Text("def".to_string())], - }, - ], - ..Default::default() - }), - ); - - assert_eq!( - URLPathTemplate::parse("/abc/{def}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("abc".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "def".to_string(), - required: true, - ..Default::default() - })], - }, - ], - ..Default::default() - }), - ); - - assert_eq!( - URLPathTemplate::parse("/abc/{def}/ghi"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("abc".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "def".to_string(), - required: true, - ..Default::default() - })], - }, - ParameterValue { - parts: vec![ValuePart::Text("ghi".to_string())], - }, - ], - ..Default::default() - }), - ); - } - - #[test] - fn test_url_path_template_parse() { - assert_eq!( - URLPathTemplate::parse("/users/{user_id}?a=b"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("users".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "user_id".to_string(), - required: true, - ..Default::default() - })], - }, - ], - query: IndexMap::from([( - "a".to_string(), - ParameterValue { - parts: vec![ValuePart::Text("b".to_string())], - } 
- )]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/users/{user_id}?a={b}&c={d!}&e={f.g}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("users".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "user_id".to_string(), - required: true, - ..Default::default() - })], - }, - ], - query: IndexMap::from([ - ( - "e".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "f.g".to_string(), - ..Default::default() - })], - }, - ), - ( - "a".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "b".to_string(), - ..Default::default() - })], - }, - ), - ( - "c".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "d".to_string(), - required: true, - ..Default::default() - })], - }, - ), - ]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/users/{id}?a={b}#junk"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("users".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "id".to_string(), - required: true, - ..Default::default() - })], - }, - ], - query: IndexMap::from([( - "a".to_string(), - ParameterValue { - parts: vec![ - ValuePart::Var(VariableExpression { - var_path: "b".to_string(), - ..Default::default() - }), - ValuePart::Text("#junk".to_string()), - ], - }, - )]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/location/{lat},{lon}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("location".to_string())], - }, - ParameterValue { - parts: vec![ - ValuePart::Var(VariableExpression { - var_path: "lat".to_string(), - required: true, - ..Default::default() - }), - ValuePart::Text(",".to_string()), - ValuePart::Var(VariableExpression { - var_path: "lon".to_string(), - required: true, - ..Default::default() - }), - ], - 
}, - ], - ..Default::default() - }), - ); - - assert_eq!( - URLPathTemplate::parse("/point3/{x},{y},{z}?a={b}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("point3".to_string())], - }, - ParameterValue { - parts: vec![ - ValuePart::Var(VariableExpression { - var_path: "x".to_string(), - required: true, - ..Default::default() - }), - ValuePart::Text(",".to_string()), - ValuePart::Var(VariableExpression { - var_path: "y".to_string(), - required: true, - ..Default::default() - }), - ValuePart::Text(",".to_string()), - ValuePart::Var(VariableExpression { - var_path: "z".to_string(), - required: true, - ..Default::default() - }), - ], - }, - ], - query: IndexMap::from([( - "a".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "b".to_string(), - ..Default::default() - })], - }, - )]), - }), - ); - } - - #[test] - fn test_generate_path() { - let template = URLPathTemplate::parse("/users/{user_id}?a={b}&c={d!}&e={f.g}").unwrap(); - - assert_eq!( - template.generate_path(&json!("not an object")), - Err(r#"Expected object, got "not an object""#.to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - // A variables object without any properties - })), - Err("Missing required variable user_id in {}".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "user_id": 123, - "b": "456", - "d": 789, - "f.g": "abc", - })), - Ok("/users/123?a=456&c=789&e=abc".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "user_id": 123, - "d": 789, - "f": "not an object", - })), - Ok("/users/123?c=789".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "b": "456", - "f.g": "abc", - "user_id": "123", - })), - Err( - r#"Missing required variable d in {"b":"456","f.g":"abc","user_id":"123"}"# - .to_string() - ), - ); - - assert_eq!( - template.generate_path(&json!({ - // The order of the variables should not matter. 
- "d": "789", - "b": "456", - "user_id": "123", - })), - Ok("/users/123?a=456&c=789".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "user_id": "123", - "b": "a", - "d": "c", - "f.g": "e", - // Extra variables should be ignored. - "extra": "ignored", - })), - Ok("/users/123?a=a&c=c&e=e".to_string()), - ); - - let template_with_nested_required_var = - URLPathTemplate::parse("/repositories/{user.login}/{repo.name}?testing={a.b.c!}") - .unwrap(); - - assert_eq!( - template_with_nested_required_var.generate_path(&json!({ - "repo.name": "repo", - "user.login": "user", - })), - Err( - r#"Missing required variable a.b.c in {"repo.name":"repo","user.login":"user"}"# - .to_string() - ), - ); - - assert_eq!( - template_with_nested_required_var.generate_path(&json!({ - "user.login": "user", - "repo.name": "repo", - "a.b.c": "value", - })), - Ok("/repositories/user/repo?testing=value".to_string()), - ); - } - - #[test] - fn test_generate_path_empty() { - assert_eq!( - URLPathTemplate::parse("") - .unwrap() - .generate_path(&json!({})) - .unwrap(), - "/".to_string() - ); - - assert_eq!( - URLPathTemplate::parse("/") - .unwrap() - .generate_path(&json!({})) - .unwrap(), - "/".to_string() - ); - - assert_eq!( - URLPathTemplate::parse("?foo=bar") - .unwrap() - .generate_path(&json!({})) - .unwrap(), - "/?foo=bar".to_string() - ); - } - - #[test] - fn test_batch_expressions() { - assert_eq!( - URLPathTemplate::parse("/users?ids={id,...}"), - Ok(URLPathTemplate { - path: vec![ParameterValue { - parts: vec![ValuePart::Text("users".to_string())], - }], - query: IndexMap::from([( - "ids".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "id".to_string(), - batch_separator: Some(",".to_string()), - ..Default::default() - })], - }, - )]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/v1/products?ids={id ...}&names={name|...}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: 
vec![ValuePart::Text("v1".to_string())] - }, - ParameterValue { - parts: vec![ValuePart::Text("products".to_string())] - }, - ], - query: IndexMap::from([ - ( - "ids".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "id".to_string(), - batch_separator: Some(" ".to_string()), - ..Default::default() - })], - }, - ), - ( - "names".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "name".to_string(), - batch_separator: Some("|".to_string()), - ..Default::default() - })], - }, - ), - ]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/people?ids={person.id,...}"), - Ok(URLPathTemplate { - path: vec![ParameterValue { - parts: vec![ValuePart::Text("people".to_string())], - }], - query: IndexMap::from([( - "ids".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "person.id".to_string(), - batch_separator: Some(",".to_string()), - ..Default::default() - })], - }, - )]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/people/{uid}/notes?ids={note_id;...}"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("people".to_string())], - }, - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "uid".to_string(), - required: true, - ..Default::default() - })], - }, - ParameterValue { - parts: vec![ValuePart::Text("notes".to_string())], - }, - ], - query: IndexMap::from([( - "ids".to_string(), - ParameterValue { - parts: vec![ValuePart::Var(VariableExpression { - var_path: "note_id".to_string(), - batch_separator: Some(";".to_string()), - ..Default::default() - })], - }, - )]), - }), - ); - - assert_eq!( - URLPathTemplate::parse("/people/by_uid:{uid}/notes?ids=[{note_id;...}]"), - Ok(URLPathTemplate { - path: vec![ - ParameterValue { - parts: vec![ValuePart::Text("people".to_string())], - }, - ParameterValue { - parts: vec![ - ValuePart::Text("by_uid:".to_string()), - 
ValuePart::Var(VariableExpression { - var_path: "uid".to_string(), - required: true, - ..Default::default() - }), - ], - }, - ParameterValue { - parts: vec![ValuePart::Text("notes".to_string())], - }, - ], - - query: IndexMap::from([( - "ids".to_string(), - ParameterValue { - parts: vec![ - ValuePart::Text("[".to_string()), - ValuePart::Var(VariableExpression { - var_path: "note_id".to_string(), - batch_separator: Some(";".to_string()), - ..Default::default() - }), - ValuePart::Text("]".to_string()), - ], - }, - )]), - }), - ); - } - - #[test] - fn test_batch_generation() { - let template = URLPathTemplate::parse("/users?ids={id,...}").unwrap(); - - assert_eq!( - template.generate_path(&json!({ - "id": [1, 2, 3], - })), - Ok("/users?ids=1,2,3".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "id": [1], - })), - Ok("/users?ids=1".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "id": [], - })), - Ok("/users".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "id": [1, 2, 3], - "extra": "ignored", - })), - Ok("/users?ids=1,2,3".to_string()), - ); - - let template = URLPathTemplate::parse("/users?ids={id;...}&names={name|...}").unwrap(); - - assert_eq!( - template.generate_path(&json!({ - "id": [1, 2, 3], - "name": ["a", "b", "c"], - })), - Ok("/users?ids=1;2;3&names=a|b|c".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "id": 123, - "name": "456", - })), - Ok("/users?ids=123&names=456".to_string()), - ); - } - - #[test] - fn test_extract_vars_from_url_path() { - let repo_template = URLPathTemplate::parse("/repository/{user.login}/{repo.name}").unwrap(); - - assert_eq!( - repo_template.extract_vars("/repository/user/repo"), - Ok(json!({ - "user.login": "user", - "repo.name": "repo", - })), - ); - - let template_with_query_params = URLPathTemplate::parse( - "/contacts/{cid}/notes/{nid}?testing={a.b.c!}&testing2={a.b.d}&type={type}", - ) - .unwrap(); - - assert_eq!( - 
template_with_query_params - .extract_vars("/contacts/123/notes/456?testing=abc&testing2=def&type=ghi"), - Ok(json!({ - "cid": "123", - "nid": "456", - "a.b.c": "abc", - "a.b.d": "def", - "type": "ghi", - })), - ); - - assert_eq!( - template_with_query_params - .extract_vars("/contacts/123/notes/456?testing2=def&type=ghi"), - Err("Missing required query parameter testing={a.b.c!}".to_string()), - ); - - assert_eq!( - template_with_query_params.extract_vars("/contacts/123/notes/456?testing=789"), - Ok(json!({ - "cid": "123", - "nid": "456", - "a.b.c": "789", - })), - ); - - assert_eq!( - template_with_query_params.extract_vars("/contacts/123/notes/{nid}?testing=abc"), - Err("Unexpected variable expression {nid!}".to_string()), - ); - - assert_eq!( - template_with_query_params.extract_vars("/contacts/123/notes/456?testing={wrong}"), - Err("Unexpected variable expression {wrong}".to_string()), - ); - - assert_eq!( - template_with_query_params.extract_vars("/wrong/123/notes/456?testing=abc"), - Err("Constant text contacts not found in wrong".to_string()), - ); - - assert_eq!( - template_with_query_params.extract_vars("/contacts/123/wrong/456?testing=abc"), - Err("Constant text notes not found in wrong".to_string()), - ); - - let template_with_constant_query_param = - URLPathTemplate::parse("/contacts/{cid}?constant=asdf&required={a!}&optional={b}") - .unwrap(); - - assert_eq!( - template_with_constant_query_param - .extract_vars("/contacts/123?required=456&optional=789"), - // Since constant-valued query parameters do not affect the - // extracted variables, we don't need to fail when they are missing - // from a given URL. 
- Ok(json!({ - "cid": "123", - "a": "456", - "b": "789", - })), - ); - - assert_eq!( - template_with_constant_query_param.generate_path(&json!({ - "cid": "123", - "a": "456", - })), - Ok("/contacts/123?constant=asdf&required=456".to_string()), - ); - - assert_eq!( - template_with_constant_query_param - .extract_vars("/contacts/123?required=456&constant=asdf"), - Ok(json!({ - "cid": "123", - "a": "456", - })), - ); - - assert_eq!( - template_with_constant_query_param - .extract_vars("/contacts/123?optional=789&required=456&constant=asdf"), - Ok(json!({ - "cid": "123", - "a": "456", - "b": "789", - })), - ); - - let template_with_constant_path_part = - URLPathTemplate::parse("/users/123/notes/{nid}").unwrap(); - - assert_eq!( - template_with_constant_path_part.extract_vars("/users/123/notes/456"), - Ok(json!({ - "nid": "456", - })), - ); - - assert_eq!( - template_with_constant_path_part.extract_vars("/users/123/notes/456?ignored=true"), - Ok(json!({ - "nid": "456", - })), - ); - - assert_eq!( - template_with_constant_path_part.extract_vars("/users/abc/notes/456"), - Err("Constant text 123 not found in abc".to_string()), - ); - } - - #[test] - fn test_multi_variable_parameter_values() { - let template = URLPathTemplate::parse( - "/locations/xyz({x},{y},{z})?required={b},{c};{d!}&optional=[{e},{f}]", - ) - .unwrap(); - - assert_eq!( - template.generate_path(&json!({ - "x": 1, - "y": 2, - "z": 3, - "b": 4, - "c": 5, - "d": 6, - "e": 7, - "f": 8, - })), - Ok("/locations/xyz(1,2,3)?required=4,5;6&optional=[7,8]".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "x": 1, - "y": 2, - "z": 3, - "b": 4, - "c": 5, - "d": 6, - "e": 7, - // "f": 8, - })), - Ok("/locations/xyz(1,2,3)?required=4,5;6".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "x": 1, - "y": 2, - "z": 3, - "b": 4, - "c": 5, - "d": 6, - // "e": 7, - "f": 8, - })), - Ok("/locations/xyz(1,2,3)?required=4,5;6".to_string()), - ); - - assert_eq!( - 
template.generate_path(&json!({ - "x": 1, - "y": 2, - "z": 3, - "b": 4, - "c": 5, - "d": 6, - })), - Ok("/locations/xyz(1,2,3)?required=4,5;6".to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - // "x": 1, - "y": 2, - "z": 3, - })), - Err(r#"Missing required variable x in {"y":2,"z":3}"#.to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "x": 1, - "y": 2, - // "z": 3, - })), - Err(r#"Missing required variable z in {"x":1,"y":2}"#.to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "b": 4, - "c": 5, - "x": 1, - "y": 2, - "z": 3, - // "d": 6, - })), - Err(r#"Missing required variable d in {"b":4,"c":5,"x":1,"y":2,"z":3}"#.to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - "b": 4, - // "c": 5, - "d": 6, - "x": 1, - "y": 2, - "z": 3, - })), - Err(r#"Missing variable c for required parameter {b},{c};{d!} given variables {"b":4,"d":6,"x":1,"y":2,"z":3}"#.to_string()), - ); - - assert_eq!( - template.generate_path(&json!({ - // "b": 4, - // "c": 5, - "d": 6, - "x": 1, - "y": 2, - "z": 3, - })), - Err(r#"Missing variable b for required parameter {b},{c};{d!} given variables {"d":6,"x":1,"y":2,"z":3}"#.to_string()), - ); - - assert_eq!( - URLPathTemplate::parse( - "/locations/xyz({x}{y}{z})?required={b},{c};{d!}&optional=[{e}{f},{g}]" - ), - Err("Ambiguous adjacent variable expressions in xyz({x}{y}{z})".to_string()), - ); - - assert_eq!( - URLPathTemplate::parse( - "/locations/xyz({x},{y},{z})?required={b}{c};{d!}&optional=[{e}{f},{g}]" - ), - Err("Ambiguous adjacent variable expressions in {b}{c};{d!}".to_string()), - ); - - assert_eq!( - URLPathTemplate::parse( - "/locations/xyz({x},{y},{z})?required={b},{c};{d!}&optional=[{e};{f}{g}]" - ), - Err("Ambiguous adjacent variable expressions in [{e};{f}{g}]".to_string()), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(1,2,3)?required=4,5;6&optional=[7,8]"), - Ok(json!({ - "x": "1", - "y": "2", - "z": "3", - "b": "4", - "c": "5", - "d": 
"6", - "e": "7", - "f": "8", - })), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?required=-5,10.1;2"), - Ok(json!({ - "x": "3", - "y": "2", - "z": "1", - "b": "-5", - "c": "10.1", - "d": "2", - })), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?optional=[-5,10.1;2]&required=6,7;8"), - Ok(json!({ - "x": "3", - "y": "2", - "z": "1", - "b": "6", - "c": "7", - "d": "8", - "e": "-5", - "f": "10.1;2", - })), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1?required=4,5;6)"), - Err("Constant text ) not found in xyz(3,2,1".to_string()), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?required=4,5,6"), - Err("Constant text ; not found in 4,5,6".to_string()), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?optional=[p,q]&required=4,5;6"), - Ok(json!({ - "x": "3", - "y": "2", - "z": "1", - "b": "4", - "c": "5", - "d": "6", - "e": "p", - "f": "q", - })), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?optional=(r,s)&required=4,5;6"), - Err("Constant text [ not found in (r,s)".to_string()), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(3,2,1)?optional=[r,s)&required=4,5;6"), - Err("Constant text ] not found in [r,s)".to_string()), - ); - - assert_eq!( - template.extract_vars("/locations/xyz(1.25,2,3.5)?required=(4,5.1;6.6,7)"), - Ok(json!({ - "x": "1.25", - "y": "2", - "z": "3.5", - "b": "(4", - "c": "5.1", - "d": "6.6,7)", - })), - ); - - let line_template = - URLPathTemplate::parse("/line/{p1.x},{p1.y},{p1.z}/{p2.x},{p2.y},{p2.z}").unwrap(); - - assert_eq!( - line_template.generate_path(&json!({ - "p1.x": 1, - "p1.y": 2, - "p1.z": 3, - "p2.x": 4, - "p2.y": 5, - "p2.z": 6, - })), - Ok("/line/1,2,3/4,5,6".to_string()), - ); - - assert_eq!( - line_template.generate_path(&json!({ - "p1.x": 1, - "p1.y": 2, - "p1.z": 3, - "p2.x": 4, - "p2.y": 5, - // "p2.z": 6, - })), - Err(r#"Missing required variable p2.z in 
{"p1.x":1,"p1.y":2,"p1.z":3,"p2.x":4,"p2.y":5}"#.to_string()), - ); - - assert_eq!( - line_template.generate_path(&json!({ - "p1.x": 1, - // "p1.y": 2, - "p1.z": 3, - "p2.x": 4, - "p2.y": 5, - "p2.z": 6, - })), - Err(r#"Missing required variable p1.y in {"p1.x":1,"p1.z":3,"p2.x":4,"p2.y":5,"p2.z":6}"#.to_string()), - ); - - assert_eq!( - line_template.extract_vars("/line/6.6,5.5,4.4/3.3,2.2,1.1"), - Ok(json!({ - "p1.x": "6.6", - "p1.y": "5.5", - "p1.z": "4.4", - "p2.x": "3.3", - "p2.y": "2.2", - "p2.z": "1.1", - })), - ); - - assert_eq!( - line_template.extract_vars("/line/(6,5,4)/[3,2,1]"), - Ok(json!({ - "p1.x": "(6", - "p1.y": "5", - "p1.z": "4)", - "p2.x": "[3", - "p2.y": "2", - "p2.z": "1]", - })), - ); - - assert_eq!( - line_template.extract_vars("/line/6.6,5.5,4.4/3.3,2.2"), - Err("Constant text , not found in 3.3,2.2".to_string()), - ); - } - - #[test] - fn test_extract_batch_vars() { - let template_comma = URLPathTemplate::parse("/users?ids=[{id,...}]").unwrap(); - - assert_eq!( - template_comma.extract_vars("/users?ids=[1,2,3]"), - Ok(json!({ - "id": ["1", "2", "3"], - })), - ); - - assert_eq!( - template_comma.extract_vars("/users?ids=[]"), - Ok(json!({ - "id": [], - })), - ); - - assert_eq!( - template_comma.extract_vars("/users?ids=[123]&extra=ignored"), - Ok(json!({ - "id": ["123"], - })), - ); - - let template_semicolon = URLPathTemplate::parse("/columns/{a,...};{b,...}").unwrap(); - - assert_eq!( - template_semicolon.extract_vars("/columns/1;2"), - Ok(json!({ - "a": ["1"], - "b": ["2"], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/1,2;3"), - Ok(json!({ - "a": ["1", "2"], - "b": ["3"], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/1;2,3"), - Ok(json!({ - "a": ["1"], - "b": ["2", "3"], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/1;2;3"), - Ok(json!({ - "a": ["1"], - "b": ["2;3"], - })), - ); - - assert_eq!( - 
template_semicolon.extract_vars("/columns/;3,2,1?extra=ignored"), - Ok(json!({ - "a": [], - "b": ["3", "2", "1"], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/1,2,3;"), - Ok(json!({ - "a": ["1", "2", "3"], - "b": [], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/1,2,3;9,8,7,6"), - Ok(json!({ - "a": ["1", "2", "3"], - "b": ["9", "8", "7", "6"], - })), - ); - - assert_eq!( - template_semicolon.extract_vars("/columns/;?extra=ignored"), - Ok(json!({ - "a": [], - "b": [], - })), - ); - } - - #[test] - fn test_display_trait() { - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/users/{user_id}?a={b}&c={d!}&e={f.g}").unwrap() - ), - "/users/{user_id!}?a={b}&c={d!}&e={f.g}".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/users/{user_id}?a={b}&c={d!}&e={f.g}").unwrap() - ), - "/users/{user_id!}?a={b}&c={d!}&e={f.g}".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/users/{user_id}?a={b}&c={d!}&e={f.g}").unwrap() - ), - "/users/{user_id!}?a={b}&c={d!}&e={f.g}".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/users?ids={id,...}&names={name|...}").unwrap() - ), - "/users?ids={id,...}&names={name|...}".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/users?ids={id!,...}&names={user.name|...}").unwrap() - ), - "/users?ids={id!,...}&names={user.name|...}".to_string(), - ); - - assert_eq!( - format!("{}", URLPathTemplate::parse("/position/{x},{y}").unwrap(),), - "/position/{x!},{y!}".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/position/xyz({x},{y},{z})").unwrap(), - ), - "/position/xyz({x!},{y!},{z!})".to_string(), - ); - - assert_eq!( - format!( - "{}", - URLPathTemplate::parse("/position?xyz=({x},{y},{z})").unwrap(), - ), - "/position?xyz=({x},{y},{z})".to_string(), - ); - } - - #[test] - fn test_required_parameters() { - assert_eq!( - 
URLPathTemplate::parse("/users/{user_id}?a={b}&c={d.e!}&e={f.g}") - .unwrap() - .required_parameters(), - vec!["d.e", "user_id"], - ); - - assert_eq!( - URLPathTemplate::parse("/users?ids={id,...}&names={name|...}") - .unwrap() - .required_parameters(), - Vec::::new(), - ); - - assert_eq!( - URLPathTemplate::parse("/users?ids={id!,...}&names={user.name|...}") - .unwrap() - .required_parameters(), - vec!["id"], - ); - - assert_eq!( - URLPathTemplate::parse("/position/{x},{y}") - .unwrap() - .required_parameters(), - vec!["x", "y"], - ); - - assert_eq!( - URLPathTemplate::parse("/position/xyz({x},{y},{z})") - .unwrap() - .required_parameters(), - vec!["x", "y", "z"], - ); - - assert_eq!( - URLPathTemplate::parse("/position?xyz=({x!},{y},{z!})") - .unwrap() - .required_parameters(), - vec!["x", "z"], - ); - - assert_eq!( - URLPathTemplate::parse("/users/{id}?user_id={id}") - .unwrap() - .required_parameters(), - vec!["id"], - ); - - assert_eq!( - URLPathTemplate::parse("/users/{$this.id}?foo={$this.bar!}") - .unwrap() - .required_parameters(), - vec!["$this.bar", "$this.id"], - ); - } -} diff --git a/apollo-federation/src/sources/connect/url_template.rs b/apollo-federation/src/sources/connect/url_template.rs new file mode 100644 index 0000000000..e633d1b4bb --- /dev/null +++ b/apollo-federation/src/sources/connect/url_template.rs @@ -0,0 +1,336 @@ +use std::fmt::Display; +use std::str::FromStr; + +use apollo_compiler::collections::IndexMap; +use itertools::Itertools; +use serde::Serialize; +use serde_json_bytes::Value; +use url::Url; + +use crate::sources::connect::string_template::Error; +use crate::sources::connect::string_template::Expression; +use crate::sources::connect::string_template::StringTemplate; + +/// A parser accepting URLTemplate syntax, which is useful both for +/// generating new URL paths from provided variables and for extracting variable +/// values from concrete URL paths. 
+/// +// TODO: In the future, when we add RFC 6570 features, this could contain one big +// `StringTemplate`, but for now we have to store pieces independently so we can leverage +// `Url` to do percent-encoding for us. +#[derive(Debug, Clone, Default)] +pub struct URLTemplate { + /// Scheme + host if this is an absolute URL + pub base: Option, + path: Vec, + query: Vec<(StringTemplate, StringTemplate)>, +} + +impl URLTemplate { + pub(crate) fn path_expressions(&self) -> impl Iterator { + self.path.iter().flat_map(StringTemplate::expressions) + } + + pub(crate) fn query_expressions(&self) -> impl Iterator { + self.query + .iter() + .flat_map(|(key, value)| key.expressions().chain(value.expressions())) + } + /// Return all variables in the template in the order they appeared + pub(crate) fn expressions(&self) -> impl Iterator { + self.path_expressions().chain(self.query_expressions()) + } + + pub fn interpolate_path(&self, vars: &IndexMap) -> Result, Error> { + self.path + .iter() + .map(|param_value| param_value.interpolate(vars)) + .collect() + } + + pub fn interpolate_query( + &self, + vars: &IndexMap, + ) -> Result, Error> { + self.query + .iter() + .map(|(key, param_value)| Ok((key.interpolate(vars)?, param_value.interpolate(vars)?))) + .collect() + } +} + +impl FromStr for URLTemplate { + type Err = Error; + + /// Top-level parsing entry point for URLTemplate syntax. 
+ fn from_str(input: &str) -> Result { + let (raw_base, rest) = if let Some(end_of_scheme) = input.find("://") { + let start_of_authority = end_of_scheme + 3; + let rest_of_uri = &input[start_of_authority..]; + let end_of_authority = rest_of_uri + .find('/') + .or_else(|| rest_of_uri.find('?')) + .or_else(|| rest_of_uri.find('#')) + .unwrap_or(rest_of_uri.len()) + + start_of_authority; + let authority = Some(&input[..end_of_authority]); + if end_of_authority < input.len() { + (authority, Some(&input[end_of_authority..])) + } else { + (authority, None) + } + } else { + (None, Some(input)) + }; + let base = raw_base + .map(|raw_base| { + Url::parse(raw_base).map_err(|err| Error { + message: err.to_string(), + location: 0..raw_base.len(), + }) + }) + .transpose()?; + + let mut prefix_suffix = rest.into_iter().flat_map(|rest| rest.splitn(2, '?')); + let path_prefix = prefix_suffix.next(); + let query_suffix = prefix_suffix.next(); + + let path = path_prefix + .into_iter() + .flat_map(|path_prefix| path_prefix.split('/')) + .filter(|path_part| !path_part.is_empty()) + .map(|path_part| { + StringTemplate::parse( + path_part, + path_part.as_ptr() as usize - input.as_ptr() as usize, + ) + }) + .try_collect()?; + + let query = query_suffix + .into_iter() + .flat_map(|query_suffix| query_suffix.split('&')) + .map(|query_part| { + let offset = query_part.as_ptr() as usize - input.as_ptr() as usize; + let (key, value) = query_part.split_once('=').ok_or_else(|| { + let end = offset + query_part.len(); + Error { + message: format!("Query parameter {query_part} must have a value"), + location: offset..end, + } + })?; + let key = StringTemplate::parse(key, offset)?; + let value = StringTemplate::parse( + value, + value.as_ptr() as usize - input.as_ptr() as usize, + )?; + Ok::<(StringTemplate, StringTemplate), Self::Err>((key, value)) + }) + .try_collect()?; + + Ok(URLTemplate { base, path, query }) + } +} + +impl Display for URLTemplate { + fn fmt(&self, f: &mut 
std::fmt::Formatter<'_>) -> std::fmt::Result { + if let Some(base) = &self.base { + f.write_str(base.to_string().trim_end_matches('/'))?; + } + + for param_value in &self.path { + f.write_str("/")?; + param_value.fmt(f)?; + } + + if !self.query.is_empty() { + f.write_str("?")?; + let mut first = true; + for (key, param_value) in &self.query { + if first { + first = false; + } else { + f.write_str("&")?; + } + key.fmt(f)?; + f.write_str("=")?; + param_value.fmt(f)?; + } + } + + Ok(()) + } +} + +impl Serialize for URLTemplate { + fn serialize(&self, serializer: S) -> Result { + serializer.collect_str(self) + } +} + +#[cfg(test)] +mod test_parse { + use insta::assert_debug_snapshot; + + use super::*; + + #[test] + fn test_path_list() { + assert_debug_snapshot!(URLTemplate::from_str("/abc")); + + assert_debug_snapshot!(URLTemplate::from_str("/abc/def")); + + assert_debug_snapshot!(URLTemplate::from_str("/abc/{$args.def}")); + + assert_debug_snapshot!(URLTemplate::from_str("/abc/{$this.def.thing}/ghi")); + } + + #[test] + fn test_url_path_template_parse() { + assert_debug_snapshot!(URLTemplate::from_str("/users/{$config.user_id}?a=b")); + + assert_debug_snapshot!(URLTemplate::from_str( + "/users/{$this.user_id}?a={$args.b}&c={$args.d}&e={$args.f.g}" + )); + + assert_debug_snapshot!(URLTemplate::from_str( + "/users/{$this.id}?a={$config.b}#junk" + )); + + assert_debug_snapshot!(URLTemplate::from_str("/location/{$this.lat},{$this.lon}")); + } + + #[test] + fn basic_absolute_url() { + assert_debug_snapshot!(URLTemplate::from_str("http://example.com")); + } + + #[test] + fn absolute_url_with_path() { + assert_debug_snapshot!(URLTemplate::from_str("http://example.com/abc/def")); + } + + #[test] + fn absolute_url_with_path_variable() { + assert_debug_snapshot!(URLTemplate::from_str("http://example.com/{$args.abc}/def")); + } + + #[test] + fn absolute_url_with_query() { + assert_debug_snapshot!(URLTemplate::from_str("http://example.com?abc=def")); + } + + #[test] + fn 
absolute_url_with_query_variable() { + assert_debug_snapshot!(URLTemplate::from_str("http://example.com?abc={$args.abc}")); + } + + #[test] + fn variable_param_key() { + assert_debug_snapshot!(URLTemplate::from_str( + "?{$args.filter.field}={$args.filter.value}" + )); + } + + #[test] + fn nested_braces_in_expression() { + assert_debug_snapshot!(URLTemplate::from_str("/position/xz/{$this { x { y } } }")); + } + + #[test] + fn expression_missing_closing_bracket() { + assert_debug_snapshot!(URLTemplate::from_str("{$this { x: { y } }")); + } +} + +#[cfg(test)] +#[rstest::rstest] +#[case("/users/{$this.user_id}?a={$this.b}&c={$this.d}&e={$this.f.g}")] +#[case("/position/{$this.x},{$this.y}")] +#[case("/position/xyz({$this.x},{$this.y},{$this.z})")] +#[case("/position?xyz=({$this.x},{$this.y},{$this.z})")] +fn test_display_trait(#[case] template: &str) { + assert_eq!( + URLTemplate::from_str(template).unwrap().to_string(), + template.to_string() + ); +} + +#[cfg(test)] +mod test_interpolate { + use pretty_assertions::assert_eq; + use serde_json_bytes::json; + + use super::*; + + #[test] + fn missing_values_render_as_empty_strings() { + let template = URLTemplate::from_str( + "/something/{$args.id}/blah?{$args.filter.field}={$args.filter.value}", + ) + .unwrap(); + let vars = IndexMap::default(); + assert_eq!( + template.interpolate_query(&vars).unwrap(), + &[("".to_string(), "".to_string())], + ); + assert_eq!( + template.interpolate_path(&vars).unwrap(), + &["something".to_string(), "".to_string(), "blah".to_string()], + ); + } + + #[test] + fn nulls_render_as_empty_strings() { + let template = URLTemplate::from_str( + "/something/{$args.id}/blah?{$args.filter.field}={$args.filter.value}", + ) + .unwrap(); + let mut vars = IndexMap::default(); + vars.insert( + String::from("$args"), + json!({"filter": {"field": null}, "id": null}), + ); + assert_eq!( + template.interpolate_query(&vars).unwrap(), + &[("".to_string(), "".to_string())], + ); + assert_eq!( + 
template.interpolate_path(&vars).unwrap(), + &["something".to_string(), "".to_string(), "blah".to_string()] + ); + } + + #[test] + fn interpolate_path() { + let template = URLTemplate::from_str("/something/{$args.id->first}/blah").unwrap(); + let mut vars = IndexMap::default(); + vars.insert(String::from("$args"), json!({"id": "value"})); + assert_eq!( + template.interpolate_path(&vars).unwrap(), + &["something".to_string(), "v".to_string(), "blah".to_string()], + "Path parameter interpolated" + ); + } + + #[test] + fn interpolate_query() { + let template = URLTemplate::from_str( + "/something/{$args.id}/blah?{$args.filter.field}={$args.filter.value}", + ) + .unwrap(); + let mut vars = IndexMap::default(); + vars.insert( + String::from("$args"), + json!({"filter": {"field": "name", "value": "value"}}), + ); + vars.insert( + String::from("$args.filter.value"), + json!({"filter": { "value": "value"}}), + ); + assert_eq!( + template.interpolate_query(&vars).unwrap(), + &[("name".to_string(), "value".to_string())], + ); + } +} diff --git a/apollo-federation/src/sources/connect/validation/coordinates.rs b/apollo-federation/src/sources/connect/validation/coordinates.rs new file mode 100644 index 0000000000..2b46b4d100 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/coordinates.rs @@ -0,0 +1,251 @@ +use std::fmt; +use std::fmt::Display; +use std::fmt::Formatter; + +use apollo_compiler::ast::Directive; +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::ast::Value; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Name; +use apollo_compiler::Node; + +use super::DirectiveName; +use crate::sources::connect::spec::schema::CONNECT_BODY_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::CONNECT_ENTITY_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::CONNECT_SELECTION_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::CONNECT_SOURCE_ARGUMENT_NAME; +use 
crate::sources::connect::spec::schema::HEADERS_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::SOURCE_BASE_URL_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::SOURCE_NAME_ARGUMENT_NAME; + +/// The location of a field within an object. +#[derive(Clone, Copy)] +pub(super) struct FieldCoordinate<'a> { + pub(super) object: &'a Node, + pub(super) field: &'a Component, +} + +impl Display for FieldCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let Self { object, field } = self; + write!( + f, + "`{object}.{field}`", + object = object.name, + field = field.name + ) + } +} + +/// The location of a `@connect` directive. +#[derive(Clone, Copy)] +pub(super) struct ConnectDirectiveCoordinate<'a> { + pub(super) directive: &'a Node, + pub(super) field_coordinate: FieldCoordinate<'a>, +} + +impl Display for ConnectDirectiveCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let Self { + directive, + field_coordinate, + } = self; + write!( + f, + "`@{connect_directive_name}` on {field_coordinate}", + connect_directive_name = directive.name + ) + } +} + +#[derive(Clone, Copy)] +pub(super) struct SelectionCoordinate<'a> { + pub(crate) connect_directive_coordinate: ConnectDirectiveCoordinate<'a>, +} + +impl Display for SelectionCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let ConnectDirectiveCoordinate { + directive, + field_coordinate, + } = self.connect_directive_coordinate; + write!( + f, + "`@{connect_directive_name}({CONNECT_SELECTION_ARGUMENT_NAME}:)` on {field_coordinate}", + connect_directive_name = directive.name + ) + } +} + +impl<'a> From> for SelectionCoordinate<'a> { + fn from(connect_directive_coordinate: ConnectDirectiveCoordinate<'a>) -> Self { + Self { + connect_directive_coordinate, + } + } +} + +/// The coordinate of an `HTTP` arg within a connect directive. 
+pub(super) struct ConnectHTTPCoordinate<'a> { + pub(crate) connect_directive_coordinate: ConnectDirectiveCoordinate<'a>, +} + +impl Display for ConnectHTTPCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let ConnectDirectiveCoordinate { + directive, + field_coordinate, + } = self.connect_directive_coordinate; + write!( + f, + "`@{connect_directive_name}({HTTP_ARGUMENT_NAME}:)` on {field_coordinate}", + connect_directive_name = directive.name + ) + } +} + +impl<'a> From> for ConnectHTTPCoordinate<'a> { + fn from(connect_directive_coordinate: ConnectDirectiveCoordinate<'a>) -> Self { + Self { + connect_directive_coordinate, + } + } +} + +/// The coordinate of an `HTTP.method` arg within the `@connect` directive. +#[derive(Clone, Copy)] +pub(super) struct HttpMethodCoordinate<'a> { + pub(crate) connect: ConnectDirectiveCoordinate<'a>, + pub(crate) http_method: &'a Name, + pub(crate) node: &'a Node, +} + +impl Display for HttpMethodCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let Self { + connect: + ConnectDirectiveCoordinate { + directive, + field_coordinate, + }, + http_method, + node: _node, + } = self; + write!( + f, + "`{http_method}` in `@{connect_directive_name}({HTTP_ARGUMENT_NAME}:)` on {field_coordinate}", + connect_directive_name = directive.name, + ) + } +} + +/// The `baseURL` argument for the `@source` directive +#[derive(Clone, Copy)] +pub(super) struct BaseUrlCoordinate<'a> { + pub(crate) source_directive_name: &'a DirectiveName, +} + +impl Display for BaseUrlCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + let Self { + source_directive_name, + } = self; + write!( + f, + "`@{source_directive_name}({SOURCE_BASE_URL_ARGUMENT_NAME}:)`", + ) + } +} + +pub(super) fn connect_directive_http_body_coordinate( + connect_directive_name: &Name, + object: &Node, + field: &Name, +) -> String { + format!("`@{connect_directive_name}({HTTP_ARGUMENT_NAME}: {{{CONNECT_BODY_ARGUMENT_NAME}:}})` on 
`{object_name}.{field}`", object_name = object.name) +} + +pub(super) fn source_http_argument_coordinate(source_directive_name: &DirectiveName) -> String { + format!("`@{source_directive_name}({HTTP_ARGUMENT_NAME}:)`") +} + +pub(super) fn source_name_argument_coordinate(source_directive_name: &DirectiveName) -> String { + format!("`@{source_directive_name}({SOURCE_NAME_ARGUMENT_NAME}:)`") +} + +pub(super) fn source_name_value_coordinate( + source_directive_name: &DirectiveName, + value: &Node, +) -> String { + format!("`@{source_directive_name}({SOURCE_NAME_ARGUMENT_NAME}: {value})`") +} + +pub(super) fn connect_directive_name_coordinate( + connect_directive_name: &Name, + source: &Node, + object_name: &Name, + field_name: &Name, +) -> String { + format!("`@{connect_directive_name}({CONNECT_SOURCE_ARGUMENT_NAME}: {source})` on `{object_name}.{field_name}`") +} + +/// Coordinate for an `HTTP.headers` argument in `@source` or `@connect`. +#[derive(Clone, Copy)] +pub(super) enum HttpHeadersCoordinate<'a> { + Source { + directive_name: &'a Name, + }, + Connect { + connect: ConnectDirectiveCoordinate<'a>, + object: &'a Name, + field: &'a Name, + }, +} + +impl Display for HttpHeadersCoordinate<'_> { + fn fmt(&self, f: &mut Formatter) -> fmt::Result { + match self { + Self::Connect { + connect: + ConnectDirectiveCoordinate { + directive, + field_coordinate: _, + }, + object, + field, + } => { + write!( + f, + "`@{connect_directive_name}({HTTP_ARGUMENT_NAME}.{HEADERS_ARGUMENT_NAME}:)` on `{}.{}`", + object, field, connect_directive_name = directive.name + ) + } + Self::Source { directive_name } => { + write!( + f, + "`@{directive_name}({HTTP_ARGUMENT_NAME}.{HEADERS_ARGUMENT_NAME}:)`", + ) + } + } + } +} + +pub(super) fn connect_directive_entity_argument_coordinate( + connect_directive_entity_argument: &Name, + value: &Value, + object: &Node, + field: &Name, +) -> String { + format!("`@{connect_directive_entity_argument}({CONNECT_ENTITY_ARGUMENT_NAME}: {value})` on 
`{object_name}.{field}`", object_name = object.name) +} + +pub(super) fn field_with_connect_directive_entity_true_coordinate( + connect_directive_entity_argument: &Name, + value: &Value, + object: &Node, + field: &Name, +) -> String { + format!("`{object_name}.{field}` with `@{connect_directive_entity_argument}({CONNECT_ENTITY_ARGUMENT_NAME}: {value})`", object_name = object.name) +} diff --git a/apollo-federation/src/sources/connect/validation/entity.rs b/apollo-federation/src/sources/connect/validation/entity.rs new file mode 100644 index 0000000000..e935b57413 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/entity.rs @@ -0,0 +1,350 @@ +mod compare_keys; +mod keys; + +use apollo_compiler::ast::Argument; +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::ast::InputValueDefinition; +use apollo_compiler::ast::Value; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::Directive; +use apollo_compiler::schema::ExtendedType; +use apollo_compiler::schema::InputObjectType; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Name; +use apollo_compiler::Node; +pub(super) use keys::field_set_error; +pub(super) use keys::EntityKeyChecker; + +use super::coordinates::connect_directive_entity_argument_coordinate; +use super::coordinates::field_with_connect_directive_entity_true_coordinate; +use super::extended_type::ObjectCategory; +use super::Code; +use super::Message; +use crate::sources::connect::expand::visitors::FieldVisitor; +use crate::sources::connect::expand::visitors::GroupVisitor; +use crate::sources::connect::spec::schema::CONNECT_ENTITY_ARGUMENT_NAME; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::variable::VariableReference; + +/// Applies additional validations to `@connect` if `entity` is `true`. 
+pub(super) fn validate_entity_arg( + field: &Component, + connect_directive: &Node, + object: &Node, + schema: &SchemaInfo, + category: ObjectCategory, +) -> Result<(), Message> { + let connect_directive_name = &connect_directive.name; + + let Some(entity_arg) = connect_directive + .arguments + .iter() + .find(|arg| arg.name == CONNECT_ENTITY_ARGUMENT_NAME) + else { + return Ok(()); + }; + let entity_arg_value = &entity_arg.value; + if !entity_arg_value.to_bool().unwrap_or_default() { + // This is not an entity resolver + return Ok(()); + } + + if category != ObjectCategory::Query { + return Err( + Message { + code: Code::EntityNotOnRootQuery, + message: format!( + "{coordinate} is invalid. Entity resolvers can only be declared on root `Query` fields.", + coordinate = connect_directive_entity_argument_coordinate(connect_directive_name, entity_arg_value.as_ref(), object, &field.name) + ), + locations: entity_arg.line_column_range(&schema.sources) + .into_iter() + .collect(), + } + ); + } + + let Some(object_type) = schema.get_object(field.ty.inner_named_type()) else { + return Err(Message { + code: Code::EntityTypeInvalid, + message: format!( + "{coordinate} is invalid. Entity connectors must return object types.", + coordinate = connect_directive_entity_argument_coordinate( + connect_directive_name, + entity_arg_value.as_ref(), + object, + &field.name + ) + ), + locations: entity_arg + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + }; + + if field.ty.is_list() || field.ty.is_non_null() { + return Err( + Message { + code: Code::EntityTypeInvalid, + message: format!( + "{coordinate} is invalid. Entity connectors must return non-list, nullable, object types. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + coordinate = connect_directive_entity_argument_coordinate( + connect_directive_name, + entity_arg_value.as_ref(), + object, + &field.name + ) + ), + locations: entity_arg + .line_column_range(&schema.sources) + .into_iter() + .collect(), + } + ); + } + + if field.arguments.is_empty() { + return Err(Message { + code: Code::EntityResolverArgumentMismatch, + message: format!( + "{coordinate} must have arguments. See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + coordinate = field_with_connect_directive_entity_true_coordinate( + connect_directive_name, + entity_arg_value.as_ref(), + object, + &field.name, + ), + ), + locations: entity_arg + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + + ArgumentVisitor { + schema, + entity_arg, + entity_arg_value, + object, + field: &field.name, + } + .walk(Group::Root { + field, + entity_type: object_type, + }) +} + +#[derive(Clone, Debug)] +enum Group<'schema> { + /// The entity itself, we're matching argument names & types to these fields + Root { + field: &'schema Node, + entity_type: &'schema Node, + }, + /// A child field of the entity we're matching against an input type. + Child { + input_type: &'schema Node, + entity_type: &'schema ExtendedType, + root_entity_type: &'schema Name, + }, +} + +#[derive(Clone, Debug)] +struct Field<'schema> { + node: &'schema Node, + input_type: &'schema ExtendedType, + entity_type: &'schema ExtendedType, + root_entity_type: &'schema Name, +} + +/// Visitor for entity resolver arguments. +/// This validates that the arguments match fields on the entity type. +/// +/// Since input types may contain fields with subtypes, and the fields of those subtypes can be +/// part of composite keys, this potentially requires visiting a tree. 
+struct ArgumentVisitor<'schema> { + schema: &'schema SchemaInfo<'schema>, + entity_arg: &'schema Node, + entity_arg_value: &'schema Node, + object: &'schema Node, + field: &'schema Name, +} + +impl<'schema> GroupVisitor, Field<'schema>> for ArgumentVisitor<'schema> { + fn try_get_group_for_field( + &self, + field: &Field<'schema>, + ) -> Result>, Self::Error> { + Ok( + // Each input type within an argument to the entity field is another group to visit + if let ExtendedType::InputObject(input_object_type) = field.input_type { + Some(Group::Child { + input_type: input_object_type, + entity_type: field.entity_type, + root_entity_type: field.root_entity_type, + }) + } else { + None + }, + ) + } + + fn enter_group(&mut self, group: &Group<'schema>) -> Result>, Self::Error> { + match group { + Group::Root { + field, entity_type, .. + } => self.enter_root_group(field, entity_type), + Group::Child { + input_type, + entity_type, + root_entity_type, + .. + } => self.enter_child_group(input_type, entity_type, root_entity_type), + } + } + + fn exit_group(&mut self) -> Result<(), Self::Error> { + Ok(()) + } +} + +impl<'schema> FieldVisitor> for ArgumentVisitor<'schema> { + type Error = Message; + + fn visit(&mut self, field: Field<'schema>) -> Result<(), Self::Error> { + let ok = match field.input_type { + ExtendedType::InputObject(_) => field.entity_type.is_object(), + ExtendedType::Scalar(_) | ExtendedType::Enum(_) => { + field.input_type == field.entity_type + } + _ => true, + }; + if ok { + Ok(()) + } else { + Err(Message { + code: Code::EntityResolverArgumentMismatch, + message: format!( + "{coordinate} has invalid arguments. 
Mismatched type on field `{field_name}` - expected `{entity_type}` but found `{input_type}`.", + coordinate = field_with_connect_directive_entity_true_coordinate( + self.schema.connect_directive_name, + self.entity_arg_value.as_ref(), + self.object, + self.field, + ), + field_name = field.node.name.as_str(), + input_type = field.input_type.name(), + entity_type = field.entity_type.name(), + ), + locations: field.node + .line_column_range(&self.schema.sources) + .into_iter() + .chain(self.entity_arg.line_column_range(&self.schema.sources)) + .collect(), + }) + } + } +} + +impl<'schema> ArgumentVisitor<'schema> { + fn enter_root_group( + &mut self, + field: &'schema Node, + entity_type: &'schema Node, + ) -> Result< + Vec>, + as FieldVisitor>>::Error, + > { + // At the root level, visit each argument to the entity field + field.arguments.iter().filter_map(|arg| { + if let Some(input_type) = self.schema.types.get(arg.ty.inner_named_type()) { + // Check that the argument has a corresponding field on the entity type + let root_entity_type = &entity_type.name; + if let Some(entity_type) = entity_type.fields.get(&*arg.name) + .and_then(|entity_field| self.schema.types.get(entity_field.ty.inner_named_type())) { + Some(Ok(Field { + node: arg, + input_type, + entity_type, + root_entity_type, + })) + } else { + Some(Err(Message { + code: Code::EntityResolverArgumentMismatch, + message: format!( + "{coordinate} has invalid arguments. 
Argument `{arg_name}` does not have a matching field `{arg_name}` on type `{entity_type}`.", + coordinate = field_with_connect_directive_entity_true_coordinate( + self.schema.connect_directive_name, + self.entity_arg_value.as_ref(), + self.object, + &field.name + ), + arg_name = &*arg.name, + entity_type = entity_type.name, + ), + locations: arg + .line_column_range(&self.schema.sources) + .into_iter() + .chain(self.entity_arg.line_column_range(&self.schema.sources)) + .collect(), + })) + } + } else { + // The input type is missing - this will be reported elsewhere, so just ignore + None + } + }).collect() + } + + fn enter_child_group( + &mut self, + child_input_type: &'schema Node, + entity_type: &'schema ExtendedType, + root_entity_type: &'schema Name, + ) -> Result< + Vec>, + as FieldVisitor>>::Error, + > { + // At the child level, visit each field on the input type + let ExtendedType::Object(entity_object_type) = entity_type else { + // Entity type was not an object type - this will be reported by field visitor + return Ok(Vec::new()); + }; + child_input_type.fields.iter().filter_map(|(name, input_field)| { + if let Some(entity_field) = entity_object_type.fields.get(name) { + let entity_field_type = entity_field.ty.inner_named_type(); + let input_type = self.schema.types.get(input_field.ty.inner_named_type())?; + + self.schema.types.get(entity_field_type).map(|entity_type| Ok(Field { + node: input_field, + input_type, + entity_type, + root_entity_type, + })) + } else { + // The input type field does not have a corresponding field on the entity type + Some(Err(Message { + code: Code::EntityResolverArgumentMismatch, + message: format!( + "{coordinate} has invalid arguments. 
Field `{name}` on `{input_type}` does not have a matching field `{name}` on `{entity_type}`.", + coordinate = field_with_connect_directive_entity_true_coordinate( + self.schema.connect_directive_name, + self.entity_arg_value.as_ref(), + self.object, + self.field, + ), + input_type = child_input_type.name, + entity_type = entity_object_type.name, + ), + locations: input_field + .line_column_range(&self.schema.sources) + .into_iter() + .chain(self.entity_arg.line_column_range(&self.schema.sources)) + .collect(), + })) + } + }).collect() + } +} diff --git a/apollo-federation/src/sources/connect/validation/entity/compare_keys.rs b/apollo-federation/src/sources/connect/validation/entity/compare_keys.rs new file mode 100644 index 0000000000..29b13bfec7 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/entity/compare_keys.rs @@ -0,0 +1,105 @@ +use apollo_compiler::executable::FieldSet; +use apollo_compiler::executable::Selection; + +/// Returns true if `inner` is a subset of `outer`. +/// +/// Note: apollo_federation::operation::SelectionSet has its own `contains` +/// method I'd love to use, but it requires a ValidFederationSchema, which +/// we don't have during validation. This code can be removed after we rewrite +/// composition in rust and connector validations happen after schema validation +/// and `@link` enrichment. 
+pub(super) fn field_set_is_subset(inner: &FieldSet, outer: &FieldSet) -> bool { + inner.selection_set.ty == outer.selection_set.ty + && vec_includes_as_set( + &outer.selection_set.selections, + &inner.selection_set.selections, + selection_is_subset, + ) +} + +fn selection_is_subset(x: &Selection, y: &Selection) -> bool { + match (x, y) { + (Selection::Field(x), Selection::Field(y)) => { + x.name == y.name + && x.alias == y.alias + && vec_includes_as_set( + &x.selection_set.selections, + &y.selection_set.selections, + selection_is_subset, + ) + } + (Selection::InlineFragment(x), Selection::InlineFragment(y)) => { + x.type_condition == y.type_condition + && vec_includes_as_set( + &x.selection_set.selections, + &y.selection_set.selections, + selection_is_subset, + ) + } + _ => false, + } +} + +// `this` vector includes `other` vector as a set +fn vec_includes_as_set(this: &[T], other: &[T], item_matches: impl Fn(&T, &T) -> bool) -> bool { + other.iter().all(|other_node| { + this.iter() + .any(|this_node| item_matches(this_node, other_node)) + }) +} + +#[cfg(test)] +mod tests { + use apollo_compiler::executable::FieldSet; + use apollo_compiler::name; + use apollo_compiler::validation::Valid; + use apollo_compiler::Schema; + use rstest::rstest; + + use super::field_set_is_subset; + + fn schema() -> Valid { + Schema::parse_and_validate( + r#" + type Query { + t: T + } + + type T { + a: String + b: B + c: String + } + + type B { + x: String + y: String + } + "#, + "", + ) + .unwrap() + } + + #[rstest] + #[case("a", "a")] + #[case("a b { x } c", "a b { x } c")] + #[case("a", "a c")] + #[case("b { x }", "b { x y }")] + fn test_field_set_is_subset(#[case] inner: &str, #[case] outer: &str) { + let schema = schema(); + let inner = FieldSet::parse_and_validate(&schema, name!(T), inner, "inner").unwrap(); + let outer = FieldSet::parse_and_validate(&schema, name!(T), outer, "outer").unwrap(); + assert!(field_set_is_subset(&inner, &outer)); + } + + #[rstest] + #[case("a b { x } 
c", "a")] + #[case("b { x y }", "b { x }")] + fn test_field_set_is_not_subset(#[case] inner: &str, #[case] outer: &str) { + let schema = schema(); + let inner = FieldSet::parse_and_validate(&schema, name!(T), inner, "inner").unwrap(); + let outer = FieldSet::parse_and_validate(&schema, name!(T), outer, "outer").unwrap(); + assert!(!field_set_is_subset(&inner, &outer)); + } +} diff --git a/apollo-federation/src/sources/connect/validation/entity/keys.rs b/apollo-federation/src/sources/connect/validation/entity/keys.rs new file mode 100644 index 0000000000..418065f5bf --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/entity/keys.rs @@ -0,0 +1,128 @@ +use std::fmt; +use std::fmt::Formatter; + +use apollo_compiler::ast::Directive; +use apollo_compiler::collections::HashMap; +use apollo_compiler::executable::FieldSet; +use apollo_compiler::validation::Valid; +use apollo_compiler::Name; +use apollo_compiler::Node; +use apollo_compiler::Schema; +use itertools::Itertools; + +use super::compare_keys::field_set_is_subset; +use super::VariableReference; +use crate::link::federation_spec_definition::FEDERATION_FIELDS_ARGUMENT_NAME; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::Namespace; + +/// Collects keys and entity connectors for comparison and validation. +#[derive(Default)] +pub(crate) struct EntityKeyChecker<'schema> { + /// Any time we see `type T @key(fields: "f")` (with resolvable: true) + resolvable_keys: Vec<(FieldSet, &'schema Node, &'schema Name)>, + /// Any time we see either: + /// - `type Query { t(f: X): T @connect(entity: true) }` (Explicit entity resolver) + /// - `type T { f: X g: Y @connect(... $this.f ...) 
}` (Implicit entity resolver) + entity_connectors: HashMap>>, +} + +impl<'schema> EntityKeyChecker<'schema> { + pub(crate) fn add_key(&mut self, field_set: &FieldSet, directive: &'schema Node) { + self.resolvable_keys + .push((field_set.clone(), directive, &directive.name)); + } + + pub(crate) fn add_connector(&mut self, field_set: Valid
) { + self.entity_connectors + .entry(field_set.selection_set.ty.clone()) + .or_default() + .push(field_set); + } + + /// For each @key we've seen, check if there's a corresponding entity connector + /// by semantically comparing the @key field set with the synthesized field set + /// from the connector's arguments. + /// + /// The comparison is done by checking if the @key field set is a subset of the + /// entity connector's field set. It's not equality because we convert `@external`/ + /// `@requires` fields to keys for simplicity's sake. + pub(crate) fn check_for_missing_entity_connectors(&self, schema: &Schema) -> Vec { + let mut messages = Vec::new(); + + for (key, directive, _) in &self.resolvable_keys { + let for_type = self.entity_connectors.get(&key.selection_set.ty); + let key_exists = for_type + .map(|connectors| { + connectors + .iter() + .any(|connector| field_set_is_subset(key, connector)) + }) + .unwrap_or(false); + if !key_exists { + messages.push(Message { + code: Code::MissingEntityConnector, + message: format!( + "Entity resolution for `@key(fields: \"{}\")` on `{}` is not implemented by a connector. See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + directive.argument_by_name(&FEDERATION_FIELDS_ARGUMENT_NAME, schema).ok().and_then(|arg| arg.as_str()).unwrap_or_default(), + key.selection_set.ty, + ), + locations: directive + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + } + + messages + } +} + +impl fmt::Debug for EntityKeyChecker<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + f.debug_struct("EntityKeyChecker") + .field( + "resolvable_keys", + &self + .resolvable_keys + .iter() + .map(|(fs, _, _)| { + format!( + "... on {} {}", + fs.selection_set.ty, + fs.selection_set.serialize().no_indent() + ) + }) + .collect_vec(), + ) + .field( + "entity_connectors", + &self + .entity_connectors + .values() + .flatten() + .map(|fs| { + format!( + "... 
on {} {}", + fs.selection_set.ty, + fs.selection_set.serialize().no_indent() + ) + }) + .collect_vec(), + ) + .finish() + } +} + +pub(crate) fn field_set_error( + variables: &[VariableReference], + type_name: &Name, +) -> Message { + Message { + code: Code::GraphQLError, + message: format!("Variables used in connector (`{}`) for `{}` cannot be used to create a valid `@key` directive.", variables.iter().join("`, `"), type_name), + locations: vec![], + } +} diff --git a/apollo-federation/src/sources/connect/validation/expression.rs b/apollo-federation/src/sources/connect/validation/expression.rs new file mode 100644 index 0000000000..8b4c399476 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/expression.rs @@ -0,0 +1,375 @@ +//! This module is all about validating [`Expression`]s for a given context. This isn't done at +//! runtime, _only_ during composition because it could be expensive. + +use std::str::FromStr; + +use apollo_compiler::collections::IndexMap; +use itertools::Itertools; +use shape::Shape; +use shape::ShapeCase; + +use crate::sources::connect::string_template::Error; +use crate::sources::connect::string_template::Expression; +use crate::sources::connect::validation::coordinates::ConnectDirectiveCoordinate; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::Namespace; + +/// Details about the available variables and shapes for the current expression. +/// These should be consistent for all pieces of a connector in the request phase. 
+pub(super) struct Context<'schema> { + pub(crate) schema: &'schema SchemaInfo<'schema>, + var_lookup: IndexMap, +} + +impl<'schema> Context<'schema> { + /// Create a context valid for expressions within the URI or headers of a `@connect` directive + pub(super) fn for_connect_request( + schema: &'schema SchemaInfo, + coordinate: ConnectDirectiveCoordinate, + ) -> Self { + let object_type = coordinate.field_coordinate.object; + let is_root_type = schema + .schema_definition + .query + .as_ref() + .is_some_and(|query| query.name == object_type.name) + || schema + .schema_definition + .mutation + .as_ref() + .is_some_and(|mutation| mutation.name == object_type.name); + let mut var_lookup: IndexMap = [ + ( + Namespace::Args, + Shape::record( + coordinate + .field_coordinate + .field + .arguments + .iter() + .map(|arg| (arg.name.to_string(), Shape::from(arg.ty.as_ref()))) + .collect(), + ), + ), + (Namespace::Config, Shape::unknown()), + (Namespace::Context, Shape::unknown()), + ] + .into_iter() + .collect(); + if !is_root_type { + var_lookup.insert(Namespace::This, Shape::from(object_type.as_ref())); + } + + Self { schema, var_lookup } + } + + /// Create a context valid for expressions within the `@source` directive + pub(super) fn for_source(schema: &'schema SchemaInfo) -> Self { + let var_lookup: IndexMap = [ + (Namespace::Config, Shape::unknown()), + (Namespace::Context, Shape::unknown()), + ] + .into_iter() + .collect(); + Self { schema, var_lookup } + } +} + +/// Take a single expression and check that it's valid for the given context. This checks that +/// the expression can be executed given the known args and that the output shape is as expected. +/// +/// TODO: this is only useful for URIs and headers right now, because it assumes objects/arrays are invalid. 
+pub(crate) fn validate(expression: &Expression, context: &Context) -> Result<(), Vec> { + let Expression { + expression, + location, + } = expression; + let shape = expression.shape(); + let errors: Vec = shape + .errors() + .map(|err| Error { + message: err.message.clone(), + location: err + .range + .as_ref() + .map(|range| range.start + location.start..range.end + location.start) + .unwrap_or_else(|| location.clone()), + }) + .collect(); + if !errors.is_empty() { + return Err(errors); + } + + validate_shape(&shape, context).map_err(|message| { + vec![Error { + message, + location: location.clone(), + }] + }) +} + +/// Validate that the shape is an acceptable output shape for an Expression. +/// +/// TODO: Some day, whether objects or arrays are allowed will be dependent on &self (i.e., is the * modifier used) +fn validate_shape(shape: &Shape, context: &Context) -> Result<(), String> { + match shape.case() { + ShapeCase::Array { .. } => Err("array values aren't valid here".to_string()), + ShapeCase::Object { .. } => Err("object values aren't valid here".to_string()), + ShapeCase::One(shapes) | ShapeCase::All(shapes) => { + for shape in shapes { + validate_shape(shape, context)?; + } + Ok(()) + } + ShapeCase::Name(name, key) => { + let mut shape = if name == "$root" { + return Err(format!( + "`{key}` must start with an argument name, like `$this` or `$args`", + key = key.iter().map(|key| key.to_string()).join(".") + )); + } else if name.starts_with('$') { + let namespace = Namespace::from_str(name).map_err(|_| { + format!( + "unknown variable `{name}`, must be one of {namespaces}", + namespaces = context.var_lookup.keys().map(|ns| ns.as_str()).join(", ") + ) + })?; + context + .var_lookup + .get(&namespace) + .ok_or_else(|| { + format!( + "{namespace} is not valid here, must be one of {namespaces}", + namespaces = context.var_lookup.keys().map(|ns| ns.as_str()).join(", "), + ) + })? 
+ .clone() + } else { + context + .schema + .shape_lookup + .get(name.as_str()) + .cloned() + .ok_or_else(|| format!("unknown type `{name}`"))? + }; + let mut path = name.clone(); + for key in key { + let child = shape.child(key); + if child.is_none() { + return Err(format!("`{path}` doesn't have a field named `{key}`")); + } + shape = child; + path = format!("{path}.{key}"); + } + validate_shape(&shape, context) + } + ShapeCase::Error(shape::Error { message, .. }) => Err(message.clone()), + ShapeCase::None + | ShapeCase::Bool(_) + | ShapeCase::String(_) + | ShapeCase::Int(_) + | ShapeCase::Float + | ShapeCase::Null + | ShapeCase::Unknown => Ok(()), + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::name; + use apollo_compiler::Schema; + use rstest::rstest; + + use super::*; + use crate::sources::connect::validation::coordinates::FieldCoordinate; + use crate::sources::connect::JSONSelection; + + fn expression(selection: &str) -> Expression { + Expression { + expression: JSONSelection::parse(selection).unwrap(), + location: 0..0, + } + } + + const SCHEMA: &str = r#" + extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + + type Query { + aField( + int: Int + string: String + customScalar: CustomScalar + object: InputObject + array: [InputObject] + multiLevel: MultiLevelInput + ): AnObject @connect(source: "v2") + something: String + } + + scalar CustomScalar + + input InputObject { + bool: Boolean + } + + type AnObject { + bool: Boolean + } + + input MultiLevelInput { + inner: MultiLevel + } + + type MultiLevel { + nested: String + } + "#; + + #[rstest] + #[case::int("$(1)")] + #[case::float("$(1.0)")] + #[case::bool("$(true)")] + #[case::string("$(\"hello\")")] + #[case::null("$(null)")] + #[case::property_of_object("$({\"a\": 1}).a")] + fn allowed_literals(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + 
let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let context = Context::for_source(&schema_info); + validate(&expression(selection), &context).unwrap(); + } + + #[rstest] + #[case::array("$([])")] + #[case::object("$({\"a\": 1})")] + // #[case::missing_property_of_object("$({\"a\": 1}).b")] // TODO: catch this error + fn disallowed_literals(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let context = Context::for_source(&schema_info); + assert!(validate(&expression(selection), &context).is_err()); + } + + #[rstest] + #[case::echo_valid_constants("$->echo(1)")] + #[case::map_unknown("$config->map(@)")] + #[case::map_scalar("$(1)->map(@)")] + #[case::match_only_valid_values("$config->match([1, 1], [2, true])")] + #[case::first("$([1, 2])->first")] + #[case::first_type_unknown("$config.something->first")] + #[case::last("$([1, 2])->last")] + #[case::last_type_unknown("$config.something->last")] + #[case::slice_of_string("$(\"hello\")->slice(0, 2)")] + #[case::slice_when_type_unknown("$config.something->slice(0, 2)")] + #[case::size_when_type_unknown("$config.something->size")] + #[case::size_of_array("$([])->size")] + #[case::size_of_entries("$config->entries->size")] + #[case::size_of_slice("$([1, 2, 3])->slice(0, 2)->size")] + #[case::slice_after_match("$config->match([1, \"something\"], [2, \"another\"])->slice(0, 2)")] + fn valid_methods(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let context = Context::for_source(&schema_info); + validate(&expression(selection), &context).unwrap(); + } + + #[rstest] + 
#[case::echo_invalid_constants("$->echo([])")] + #[case::map_array("$([])->map(@)")] + #[case::match_some_invalid_values("$config->match([1, 1], [2, {}])")] + #[case::slice_of_array("$([])->slice(0, 2)")] + #[case::entries("$config.something->entries")] + fn invalid_methods(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let context = Context::for_source(&schema_info); + assert!(validate(&expression(selection), &context).is_err()); + } + + #[rstest] + #[case("$args.int")] + #[case("$args.string")] + #[case("$args.customScalar")] + #[case("$args.object.bool")] + #[case("$args.array->echo(1)")] + #[case("$args.int->map(@)")] + #[case::chained_methods("$args.array->map(@)->slice(0,2)->first.bool")] + #[case::match_scalars("$args.string->match([\"hello\", \"world\"], [@, null])")] + #[case::slice("$args.string->slice(0, 2)")] + #[case::size("$args.array->size")] + #[case::first("$args.array->first.bool")] + #[case::last("$args.array->last.bool")] + #[case::multi_level_input("$args.multiLevel.inner.nested")] + fn valid_after_args_resolution(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let object = schema.get_object("Query").unwrap(); + let field = object.fields.get("aField").unwrap(); + let directive = field.directives.get("connect").unwrap(); + let coordinate = ConnectDirectiveCoordinate { + field_coordinate: FieldCoordinate { field, object }, + directive, + }; + let context = Context::for_connect_request(&schema_info, coordinate); + validate(&expression(selection), &context).unwrap(); + } + + #[rstest] + #[case::unknown_var("$args.unknown")] + #[case::arg_is_array("$args.array")] + #[case::arg_is_object("$args.object")] + 
#[case::unknown_field_on_object("$args.object.unknown")] + #[case::nested_unknown_property("$args.multiLevel.inner.unknown")] + // #[case::map_array("$args.array->map(@)")] // TODO: check for this error once we improve ->map type checking + #[case::slice_array("$args.array->slice(0, 2)")] + #[case::entries_scalar("$args.int->entries")] + #[case::first("$args.array->first")] + #[case::last("$args.array->last")] + fn invalid_args(#[case] selection: &str) { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let object = schema.get_object("Query").unwrap(); + let field = object.fields.get("aField").unwrap(); + let directive = field.directives.get("connect").unwrap(); + let coordinate = ConnectDirectiveCoordinate { + field_coordinate: FieldCoordinate { field, object }, + directive, + }; + let context = Context::for_connect_request(&schema_info, coordinate); + assert!(validate(&expression(selection), &context).is_err()); + } + + #[test] + fn this_on_query() { + let schema = Schema::parse(SCHEMA, "schema").unwrap(); + let connect = name!("connect"); + let source = name!("source"); + let schema_info = SchemaInfo::new(&schema, "", &connect, &source); + let object = schema.get_object("Query").unwrap(); + let field = object.fields.get("aField").unwrap(); + let directive = field.directives.get("connect").unwrap(); + let coordinate = ConnectDirectiveCoordinate { + field_coordinate: FieldCoordinate { field, object }, + directive, + }; + let context = Context::for_connect_request(&schema_info, coordinate); + assert!(validate(&expression("$this.something"), &context).is_err()); + } +} diff --git a/apollo-federation/src/sources/connect/validation/extended_type.rs b/apollo-federation/src/sources/connect/validation/extended_type.rs new file mode 100644 index 0000000000..6e9014d77c --- /dev/null +++ 
b/apollo-federation/src/sources/connect/validation/extended_type.rs @@ -0,0 +1,351 @@ +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::collections::IndexSet; +use apollo_compiler::executable::Selection; +use apollo_compiler::parser::SourceMap; +use apollo_compiler::parser::SourceSpan; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::ExtendedType; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Name; +use apollo_compiler::Node; +use itertools::Itertools; + +use super::coordinates::ConnectDirectiveCoordinate; +use super::coordinates::ConnectHTTPCoordinate; +use super::coordinates::FieldCoordinate; +use super::coordinates::HttpHeadersCoordinate; +use super::entity::validate_entity_arg; +use super::expression; +use super::http::headers; +use super::http::method; +use super::resolvable_key_fields; +use super::selection::validate_body_selection; +use super::selection::validate_selection; +use super::source_name::validate_source_name_arg; +use super::source_name::SourceName; +use super::Code; +use super::Message; +use crate::sources::connect::spec::schema::CONNECT_BODY_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::CONNECT_SOURCE_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::HTTP_ARGUMENT_NAME; +use crate::sources::connect::validation::graphql::SchemaInfo; + +pub(super) fn validate_extended_type( + extended_type: &ExtendedType, + schema: &SchemaInfo, + all_source_names: &[SourceName], + seen_fields: &mut IndexSet<(Name, Name)>, +) -> Vec { + match extended_type { + ExtendedType::Object(object) => { + validate_object_fields(object, schema, all_source_names, seen_fields) + } + ExtendedType::Union(union_type) => vec![validate_abstract_type( + SourceSpan::recompose(union_type.location(), union_type.name.location()), + &schema.sources, + "union", + )], + ExtendedType::Interface(interface) => vec![validate_abstract_type( + SourceSpan::recompose(interface.location(), interface.name.location()), + 
&schema.sources, + "interface", + )], + _ => Vec::new(), + } +} + +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub(crate) enum ObjectCategory { + Query, + Mutation, + Other, +} + +/// Make sure that any `@connect` directives on object fields are valid, and that all fields +/// are resolvable by some combination of `@connect` directives. +fn validate_object_fields( + object: &Node, + schema: &SchemaInfo, + source_names: &[SourceName], + seen_fields: &mut IndexSet<(Name, Name)>, +) -> Vec { + if object.is_built_in() { + return Vec::new(); + } + + // Mark resolvable key fields as seen + let mut selections: Vec<(Name, Selection)> = resolvable_key_fields(object, schema) + .flat_map(|(field_set, _)| { + field_set + .selection_set + .selections + .iter() + .map(|selection| (object.name.clone(), selection.clone())) + .collect::>() + }) + .collect(); + while !selections.is_empty() { + if let Some((type_name, selection)) = selections.pop() { + if let Some(field) = selection.as_field() { + let t = (type_name, field.name.clone()); + if !seen_fields.contains(&t) { + seen_fields.insert(t); + field.selection_set.selections.iter().for_each(|selection| { + selections.push((field.ty().inner_named_type().clone(), selection.clone())); + }); + } + } + } + } + + let source_map = &schema.sources; + let is_subscription = schema + .schema_definition + .subscription + .as_ref() + .is_some_and(|sub| sub.name == object.name); + if is_subscription { + return vec![Message { + code: Code::SubscriptionInConnectors, + message: format!( + "A subscription root type is not supported when using `@{connect_directive_name}`.", + connect_directive_name = schema.connect_directive_name, + ), + locations: object.line_column_range(source_map).into_iter().collect(), + }]; + } + + let object_category = if schema + .schema_definition + .query + .as_ref() + .is_some_and(|query| query.name == object.name) + { + ObjectCategory::Query + } else if schema + .schema_definition + .mutation + .as_ref() + 
.is_some_and(|mutation| mutation.name == object.name) + { + ObjectCategory::Mutation + } else { + ObjectCategory::Other + }; + object + .fields + .values() + .flat_map(|field| { + validate_field( + field, + object_category, + source_names, + object, + schema, + seen_fields, + ) + }) + .collect() +} + +fn validate_field( + field: &Component, + category: ObjectCategory, + source_names: &[SourceName], + object: &Node, + schema: &SchemaInfo, + seen_fields: &mut IndexSet<(Name, Name)>, +) -> Vec { + let source_map = &schema.sources; + let mut errors = Vec::new(); + let connect_directives = field + .directives + .iter() + .filter(|directive| directive.name == *schema.connect_directive_name) + .collect_vec(); + + if connect_directives.is_empty() { + match category { + ObjectCategory::Query => errors.push(get_missing_connect_directive_message( + Code::QueryFieldMissingConnect, + field, + object, + source_map, + schema.connect_directive_name, + )), + ObjectCategory::Mutation => errors.push(get_missing_connect_directive_message( + Code::MutationFieldMissingConnect, + field, + object, + source_map, + schema.connect_directive_name, + )), + _ => (), + } + + return errors; + }; + + // mark the field with a @connect directive as seen + seen_fields.insert((object.name.clone(), field.name.clone())); + + // direct recursion isn't allowed, like a connector on User.friends: [User] + if matches!(category, ObjectCategory::Other) && &object.name == field.ty.inner_named_type() { + errors.push(Message { + code: Code::CircularReference, + message: format!( + "Direct circular reference detected in `{}.{}: {}`. 
For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + object.name, + field.name, + field.ty + ), + locations: field.line_column_range(source_map).into_iter().collect(), + }); + } + + for connect_directive in connect_directives { + let field_coordinate = FieldCoordinate { object, field }; + let connect_coordinate = ConnectDirectiveCoordinate { + directive: connect_directive, + field_coordinate, + }; + + errors.extend(validate_selection(connect_coordinate, schema, seen_fields).err()); + + errors + .extend(validate_entity_arg(field, connect_directive, object, schema, category).err()); + + let Some((http_arg, http_arg_node)) = connect_directive + .specified_argument_by_name(&HTTP_ARGUMENT_NAME) + .and_then(|arg| Some((arg.as_object()?, arg))) + else { + errors.push(Message { + code: Code::GraphQLError, + message: format!( + "{connect_coordinate} must have a `{HTTP_ARGUMENT_NAME}` argument." + ), + locations: connect_directive + .line_column_range(source_map) + .into_iter() + .collect(), + }); + return errors; + }; + + let expression_context = + expression::Context::for_connect_request(schema, connect_coordinate); + + let url_template = match method::validate( + http_arg, + ConnectHTTPCoordinate::from(connect_coordinate), + http_arg_node, + &expression_context, + ) { + Ok(method) => Some(method), + Err(errs) => { + errors.extend(errs); + None + } + }; + + if let Some((_, body)) = http_arg + .iter() + .find(|(name, _)| name == &CONNECT_BODY_ARGUMENT_NAME) + { + if let Err(err) = validate_body_selection( + connect_directive, + connect_coordinate, + object, + field, + schema, + body, + ) { + errors.push(err); + } + } + + if let Some(source_name) = connect_directive + .arguments + .iter() + .find(|arg| arg.name == CONNECT_SOURCE_ARGUMENT_NAME) + { + errors.extend(validate_source_name_arg( + &field.name, + &object.name, + source_name, + source_names, + schema, + )); + + if let Some((template, coordinate)) = url_template { + if 
template.base.is_some() { + errors.push(Message { + code: Code::AbsoluteConnectUrlWithSource, + message: format!( + "{coordinate} contains the absolute URL {raw_value} while also specifying a `{CONNECT_SOURCE_ARGUMENT_NAME}`. Either remove the `{CONNECT_SOURCE_ARGUMENT_NAME}` argument or change the URL to a path.", + raw_value = coordinate.node + ), + locations: coordinate.node.line_column_range(source_map) + .into_iter() + .collect(), + }) + } + } + } else if let Some((template, coordinate)) = url_template { + if template.base.is_none() { + errors.push(Message { + code: Code::RelativeConnectUrlWithoutSource, + message: format!( + "{coordinate} specifies the relative URL {raw_value}, but no `{CONNECT_SOURCE_ARGUMENT_NAME}` is defined. Either use an absolute URL including scheme (e.g. https://), or add a `@{source_directive_name}`.", + raw_value = coordinate.node, + source_directive_name = schema.source_directive_name, + ), + locations: coordinate.node.line_column_range(source_map).into_iter().collect() + }) + } + } + + errors.extend(headers::validate_arg( + http_arg, + &expression_context, + HttpHeadersCoordinate::Connect { + connect: connect_coordinate, + object: &object.name, + field: &field.name, + }, + )); + } + errors +} + +fn validate_abstract_type( + node: Option, + source_map: &SourceMap, + keyword: &str, +) -> Message { + Message { + code: Code::ConnectorsUnsupportedAbstractType, + message: format!("Abstract schema types, such as `{keyword}`, are not supported when using connectors. 
You can check out our documentation at https://go.apollo.dev/connectors/best-practices#abstract-schema-types-are-unsupported."), + locations: node.and_then(|location| location.line_column_range(source_map)) + .into_iter() + .collect(), + } +} + +fn get_missing_connect_directive_message( + code: Code, + field: &Component, + object: &Node, + source_map: &SourceMap, + connect_directive_name: &Name, +) -> Message { + Message { + code, + message: format!( + "The field `{object_name}.{field}` has no `@{connect_directive_name}` directive.", + field = field.name, + object_name = object.name, + ), + locations: field.line_column_range(source_map).into_iter().collect(), + } +} diff --git a/apollo-federation/src/sources/connect/validation/graphql.rs b/apollo-federation/src/sources/connect/validation/graphql.rs new file mode 100644 index 0000000000..cac1bd4984 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/graphql.rs @@ -0,0 +1,82 @@ +//! Helper structs & functions for dealing with GraphQL schemas +use std::ops::Deref; + +use apollo_compiler::collections::IndexMap; +use apollo_compiler::Name; +use apollo_compiler::Schema; +use line_col::LineColLookup; +use shape::Shape; + +mod strings; + +pub(super) use strings::GraphQLString; + +pub(super) struct SchemaInfo<'schema> { + pub(crate) schema: &'schema Schema, + len: usize, + lookup: LineColLookup<'schema>, + pub(crate) connect_directive_name: &'schema Name, + pub(crate) source_directive_name: &'schema Name, + /// A lookup map for the Shapes computed from GraphQL types. 
+ pub(crate) shape_lookup: IndexMap<&'schema str, Shape>, +} + +impl<'schema> SchemaInfo<'schema> { + pub(crate) fn new( + schema: &'schema Schema, + src: &'schema str, + connect_directive_name: &'schema Name, + source_directive_name: &'schema Name, + ) -> Self { + Self { + schema, + len: src.len(), + lookup: LineColLookup::new(src), + connect_directive_name, + source_directive_name, + shape_lookup: shape::graphql::shapes_for_schema(schema), + } + } + + /// Get the 1-based line and column values for an offset into this schema. + /// + /// # Returns + /// The line and column, or `None` if the offset is not within the schema. + pub(crate) fn line_col(&self, offset: usize) -> Option<(usize, usize)> { + if offset > self.len { + None + } else { + Some(self.lookup.get(offset)) + } + } +} + +impl Deref for SchemaInfo<'_> { + type Target = Schema; + + fn deref(&self) -> &Self::Target { + self.schema + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn line_col_lookup() { + let src = r#" + type Query { + foo: String + } + "#; + let schema = Schema::parse(src, "testSchema").unwrap(); + + let name = "unused".try_into().unwrap(); + let schema_info = SchemaInfo::new(&schema, src, &name, &name); + + assert_eq!(schema_info.line_col(0), Some((1, 1))); + assert_eq!(schema_info.line_col(4), Some((2, 4))); + assert_eq!(schema_info.line_col(200), None); + } +} diff --git a/apollo-federation/src/sources/connect/validation/graphql/strings.rs b/apollo-federation/src/sources/connect/validation/graphql/strings.rs new file mode 100644 index 0000000000..ca16b26063 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/graphql/strings.rs @@ -0,0 +1,297 @@ +//! Helpers for dealing with GraphQL literal strings and locations within them. +//! +//! GraphQL string literals can be either standard single-line strings surrounded by a single +//! set of quotes, or a multi-line block string surrounded by triple quotes. +//! +//! 
Standard strings may contain escape sequences, while block strings contain verbatim text. +//! Block strings additionally have any common indent and leading whitespace lines removed. +//! +//! See: + +use std::ops::Range; + +use apollo_compiler::ast::Value; +use apollo_compiler::parser::LineColumn; +use apollo_compiler::parser::SourceMap; +use apollo_compiler::Node; +use nom::AsChar; + +use crate::sources::connect::validation::graphql::SchemaInfo; + +fn is_whitespace(c: char) -> bool { + matches!(c, ' ' | '\t') +} + +fn is_whitespace_line(line: &str) -> bool { + line.is_empty() || line.chars().all(is_whitespace) +} + +#[derive(Clone, Copy)] +pub(crate) enum GraphQLString<'schema> { + Standard { + data: Data<'schema>, + }, + Block { + data: Data<'schema>, + + /// The common indent + common_indent: usize, + }, +} + +#[derive(Clone, Copy)] +pub(crate) struct Data<'schema> { + /// The GraphQL string literal with the rules from the spec applied by the compiler + compiled_string: &'schema str, + + /// The original string from the source file, excluding the surrounding quotes + raw_string: &'schema str, + + /// Where `raw_string` _starts_ in the source text + raw_offset: usize, +} + +impl<'schema> GraphQLString<'schema> { + pub(crate) fn new( + value: &'schema Node, + sources: &'schema SourceMap, + ) -> Result { + // The node value has escape sequences removed (for standard strings) and block string + // rules applied (for block strings) which affect whitespace and newlines. Offsets into + // the node value string will not match what is in the source file. + let compiled_string = value.as_str().ok_or(())?; + + // Get the raw string value from the source file. This is just the raw string without any + // of the escape sequence processing or whitespace/newline modifications mentioned above. 
+ let source_span = value.location().ok_or(())?; + let file = sources.get(&source_span.file_id()).ok_or(())?; + let source_text = file.source_text(); + let start_of_quotes = source_span.offset(); + let end_of_quotes = source_span.end_offset(); + let raw_string_with_quotes = source_text.get(start_of_quotes..end_of_quotes).ok_or(())?; + + // Count the number of double-quote characters + let num_quotes = raw_string_with_quotes + .chars() + .take_while(|c| matches!(c, '"')) + .count(); + + // Get the raw string with the quotes removed + let raw_string = source_text + .get(start_of_quotes + num_quotes..end_of_quotes - num_quotes) + .ok_or(())?; + + Ok(if num_quotes == 3 { + GraphQLString::Block { + data: Data { + compiled_string, + raw_string, + raw_offset: start_of_quotes + num_quotes, + }, + common_indent: raw_string + .lines() + .skip(1) + .filter_map(|line| { + let length = line.len(); + let indent = line.chars().take_while(|&c| is_whitespace(c)).count(); + (indent < length).then_some(indent) + }) + .min() + .unwrap_or(0), + } + } else { + GraphQLString::Standard { + data: Data { + compiled_string, + raw_string, + raw_offset: start_of_quotes + num_quotes, + }, + } + }) + } + + pub(crate) fn as_str(&self) -> &str { + match self { + GraphQLString::Standard { data } => data.compiled_string, + GraphQLString::Block { data, .. } => data.compiled_string, + } + } + + pub(crate) fn line_col_for_subslice( + &self, + substring_location: Range, + schema_info: &SchemaInfo, + ) -> Option> { + let start_offset = self.true_offset(substring_location.start)?; + let end_offset = self.true_offset(substring_location.end)?; + + let (line, column) = schema_info.line_col(start_offset)?; + let start = LineColumn { line, column }; + let (line, column) = schema_info.line_col(end_offset)?; + let end = LineColumn { line, column }; + + Some(start..end) + } + + /// Given an offset into the compiled string, compute the true offset in the raw source string. 
+ /// See: https://spec.graphql.org/October2021/#sec-String-Value + fn true_offset(&self, input_offset: usize) -> Option { + match self { + GraphQLString::Standard { data } => { + // For standard strings, handle escape sequences + let mut i = 0usize; + let mut true_offset = data.raw_offset; + let mut chars = data.raw_string.chars(); + while i < input_offset { + let ch = chars.next()?; + true_offset += 1; + if ch == '\\' { + let next = chars.next()?; + true_offset += 1; + if next == 'u' { + // Determine the length of the codepoint in bytes. For example, \uFDFD + // is 3 bytes when encoded in UTF-8 (0xEF,0xB7,0xBD). + let codepoint: String = (&mut chars).take(4).collect(); + let codepoint = u32::from_str_radix(&codepoint, 16).ok()?; + i += char::from_u32(codepoint)?.len(); + true_offset += 4; + continue; + } + } + i += ch.len(); + } + Some(true_offset) + } + GraphQLString::Block { + data, + common_indent, + } => { + // For block strings, handle whitespace changes + let mut skip_chars = 0usize; + let mut skip_lines = data + .raw_string + .lines() + .take_while(|&line| is_whitespace_line(line)) + .count(); + let mut i = 0usize; + let mut true_offset = data.raw_offset; + let mut chars = data.raw_string.chars(); + while i < input_offset { + let ch = chars.next()?; + true_offset += 1; + if skip_chars > 0 { + if ch == '\n' { + skip_chars = *common_indent; + i += 1; + } else { + skip_chars -= 1; + } + continue; + } + if skip_lines > 0 { + if ch == '\n' { + skip_lines -= 1; + if skip_lines == 0 { + skip_chars = *common_indent; + } + } + continue; + } + if ch == '\n' { + skip_chars = *common_indent; + } + if ch != '\r' { + i += ch.len(); + } + } + Some(true_offset + skip_chars) + } + } + } +} + +#[cfg(test)] +mod tests { + use apollo_compiler::ast::Value; + use apollo_compiler::parser::LineColumn; + use apollo_compiler::schema::ExtendedType; + use apollo_compiler::Node; + use apollo_compiler::Schema; + use pretty_assertions::assert_eq; + + use 
crate::sources::connect::validation::graphql::GraphQLString; + use crate::sources::connect::validation::graphql::SchemaInfo; + + const SCHEMA: &str = r#" + type Query { + field: String @connect( + http: { + GET: "https://example.com" + }, + selection: """ + something + somethingElse { + nested + } + """ + ) + } + "#; + + fn connect_argument<'schema>(schema: &'schema Schema, name: &str) -> &'schema Node { + let ExtendedType::Object(query) = schema.types.get("Query").unwrap() else { + panic!("Query type not found"); + }; + let field = query.fields.get("field").unwrap(); + let directive = field.directives.get("connect").unwrap(); + directive.specified_argument_by_name(name).unwrap() + } + + #[test] + fn standard_string() { + let schema = Schema::parse(SCHEMA, "test.graphql").unwrap(); + let http = connect_argument(&schema, "http").as_object().unwrap(); + let value = &http[0].1; + + let string = GraphQLString::new(value, &schema.sources).unwrap(); + assert_eq!(string.as_str(), "https://example.com"); + let name = "unused".try_into().unwrap(); + let schema_info = SchemaInfo::new(&schema, SCHEMA, &name, &name); + assert_eq!( + string.line_col_for_subslice(2..5, &schema_info), + Some( + LineColumn { + line: 5, + column: 25 + }..LineColumn { + line: 5, + column: 28 + } + ) + ); + } + + #[test] + fn block_string() { + let schema = Schema::parse(SCHEMA, "test.graphql").unwrap(); + let value = connect_argument(&schema, "selection"); + + let string = GraphQLString::new(value, &schema.sources).unwrap(); + assert_eq!(string.as_str(), "something\nsomethingElse {\n nested\n}"); + let name = "unused".try_into().unwrap(); + let schema_info = SchemaInfo::new(&schema, SCHEMA, &name, &name); + assert_eq!("nested", &string.as_str()[28..34]); + assert_eq!( + string.line_col_for_subslice(28..34, &schema_info), + Some( + LineColumn { + line: 10, + column: 15 + }..LineColumn { + line: 10, + column: 21 + } + ) + ); + } +} diff --git a/apollo-federation/src/sources/connect/validation/http.rs 
b/apollo-federation/src/sources/connect/validation/http.rs new file mode 100644 index 0000000000..28d0ff8d6a --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/http.rs @@ -0,0 +1,3 @@ +pub(super) mod headers; +pub(super) mod method; +pub(super) mod url; diff --git a/apollo-federation/src/sources/connect/validation/http/headers.rs b/apollo-federation/src/sources/connect/validation/http/headers.rs new file mode 100644 index 0000000000..b83eec54ba --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/http/headers.rs @@ -0,0 +1,125 @@ +use std::collections::HashMap; + +use apollo_compiler::ast::Value; +use apollo_compiler::Name; +use apollo_compiler::Node; + +use crate::sources::connect::models::Header; +use crate::sources::connect::models::HeaderParseError; +use crate::sources::connect::spec::schema::HEADERS_ARGUMENT_NAME; +use crate::sources::connect::string_template; +use crate::sources::connect::validation::coordinates::HttpHeadersCoordinate; +use crate::sources::connect::validation::expression; +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::HeaderSource; + +pub(crate) fn validate_arg<'a>( + http_arg: &'a [(Name, Node)], + expression_context: &expression::Context, + coordinate: HttpHeadersCoordinate<'a>, +) -> Vec { + let sources = &expression_context.schema.sources; + let mut messages = Vec::new(); + let Some(headers_arg) = get_arg(http_arg) else { + return messages; + }; + + #[allow(clippy::mutable_key_type)] + let mut names = HashMap::new(); + for header in Header::from_headers_arg(headers_arg) { + let Header { + name, + name_node, + source, + source_node, + } = match header { + Ok(header) => header, + Err(err) => { + let (message, locations) = match err { + HeaderParseError::Other { message, node } => ( + message, + node.line_column_range(sources).into_iter().collect(), + ), + 
HeaderParseError::ConflictingArguments { + message, + from_location, + value_location, + } => ( + message, + from_location + .iter() + .chain(value_location.iter()) + .flat_map(|span| span.line_column_range(sources)) + .collect(), + ), + HeaderParseError::ValueError { + err: string_template::Error { message, location }, + node, + } => ( + message, + GraphQLString::new(node, sources) + .ok() + .and_then(|expression| { + expression + .line_col_for_subslice(location, expression_context.schema) + }) + .into_iter() + .collect(), + ), + }; + messages.push(Message { + code: Code::InvalidHeader, + message: format!("In {coordinate} {message}"), + locations, + }); + continue; + } + }; + if let Some(duplicate) = names.insert(name.clone(), name_node.location()) { + messages.push(Message { + code: Code::HttpHeaderNameCollision, + message: format!( + "Duplicate header names are not allowed. The header name '{name}' at {coordinate} is already defined.", + ), + locations: name_node.line_column_range(sources) + .into_iter() + .chain( + duplicate.and_then(|span| span.line_column_range(sources)) + ) + .collect(), + }); + continue; + } + if let HeaderSource::Value(header_value) = source { + let Ok(expression) = GraphQLString::new(source_node, sources) else { + // This should never fail in practice, we convert to GraphQLString only to hack in location data + continue; + }; + messages.extend( + header_value + .expressions() + .filter_map(|expression| { + expression::validate(expression, expression_context).err() + }) + .flatten() + .map(|err| Message { + code: Code::InvalidHeader, + message: format!("In {coordinate}: {}", err.message), + locations: expression + .line_col_for_subslice(err.location, expression_context.schema) + .into_iter() + .collect(), + }), + ); + } + } + messages +} + +fn get_arg(http_arg: &[(Name, Node)]) -> Option<&Node> { + http_arg + .iter() + .find_map(|(key, value)| (*key == HEADERS_ARGUMENT_NAME).then_some(value)) +} diff --git 
a/apollo-federation/src/sources/connect/validation/http/method.rs b/apollo-federation/src/sources/connect/validation/http/method.rs new file mode 100644 index 0000000000..b38585be48 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/http/method.rs @@ -0,0 +1,70 @@ +use apollo_compiler::ast::Value; +use apollo_compiler::Name; +use apollo_compiler::Node; + +use super::url::validate_template; +use crate::sources::connect::spec::schema::CONNECT_HTTP_ARGUMENT_DELETE_METHOD_NAME; +use crate::sources::connect::spec::schema::CONNECT_HTTP_ARGUMENT_GET_METHOD_NAME; +use crate::sources::connect::spec::schema::CONNECT_HTTP_ARGUMENT_PATCH_METHOD_NAME; +use crate::sources::connect::spec::schema::CONNECT_HTTP_ARGUMENT_POST_METHOD_NAME; +use crate::sources::connect::spec::schema::CONNECT_HTTP_ARGUMENT_PUT_METHOD_NAME; +use crate::sources::connect::validation::coordinates::ConnectHTTPCoordinate; +use crate::sources::connect::validation::coordinates::HttpMethodCoordinate; +use crate::sources::connect::validation::expression; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::URLTemplate; + +pub(crate) fn validate<'schema>( + http_arg: &'schema [(Name, Node)], + coordinate: ConnectHTTPCoordinate<'schema>, + http_arg_node: &Node, + expression_context: &expression::Context, +) -> Result<(URLTemplate, HttpMethodCoordinate<'schema>), Vec> { + let source_map = &expression_context.schema.sources; + let mut methods = http_arg + .iter() + .filter(|(method, _)| { + [ + CONNECT_HTTP_ARGUMENT_GET_METHOD_NAME, + CONNECT_HTTP_ARGUMENT_POST_METHOD_NAME, + CONNECT_HTTP_ARGUMENT_PUT_METHOD_NAME, + CONNECT_HTTP_ARGUMENT_PATCH_METHOD_NAME, + CONNECT_HTTP_ARGUMENT_DELETE_METHOD_NAME, + ] + .contains(method) + }) + .peekable(); + + let Some((method_name, method_value)) = methods.next() else { + return Err(vec![Message { + code: Code::MissingHttpMethod, + message: format!("{coordinate} must specify an HTTP 
method."), + locations: http_arg_node + .line_column_range(source_map) + .into_iter() + .collect(), + }]); + }; + + if methods.peek().is_some() { + let locations = method_value + .line_column_range(source_map) + .into_iter() + .chain(methods.filter_map(|(_, node)| node.line_column_range(source_map))) + .collect(); + return Err(vec![Message { + code: Code::MultipleHttpMethods, + message: format!("{coordinate} cannot specify more than one HTTP method."), + locations, + }]); + } + + let coordinate = HttpMethodCoordinate { + connect: coordinate.connect_directive_coordinate, + http_method: method_name, + node: method_value, + }; + + validate_template(coordinate, expression_context).map(|template| (template, coordinate)) +} diff --git a/apollo-federation/src/sources/connect/validation/http/url.rs b/apollo-federation/src/sources/connect/validation/http/url.rs new file mode 100644 index 0000000000..c778b9dcdb --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/http/url.rs @@ -0,0 +1,106 @@ +use std::fmt::Display; +use std::str::FromStr; + +use apollo_compiler::ast::Value; +use apollo_compiler::Node; +use url::Url; + +use crate::sources::connect::string_template; +use crate::sources::connect::validation::coordinates::HttpMethodCoordinate; +use crate::sources::connect::validation::expression; +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::URLTemplate; + +pub(crate) fn validate_template( + coordinate: HttpMethodCoordinate, + expression_context: &expression::Context, +) -> Result> { + let schema = expression_context.schema; + let (template, str_value) = match parse_template(coordinate, schema) { + Ok(tuple) => tuple, + Err(message) => return Err(vec![message]), + }; + let mut messages = Vec::new(); + if let Some(base) = template.base.as_ref() { + 
messages + .extend(validate_base_url(base, coordinate, coordinate.node, str_value, schema).err()); + } + let expression_context = expression::Context::for_connect_request(schema, coordinate.connect); + + for expression in template.expressions() { + messages.extend( + expression::validate(expression, &expression_context) + .err() + .into_iter() + .flatten() + .map(|err| Message { + code: Code::InvalidUrl, + message: format!("In {coordinate}: {}", err.message), + locations: str_value + .line_col_for_subslice(err.location, schema) + .into_iter() + .collect(), + }), + ); + } + + if messages.is_empty() { + Ok(template) + } else { + Err(messages) + } +} + +fn parse_template<'schema>( + coordinate: HttpMethodCoordinate<'schema>, + schema: &'schema SchemaInfo, +) -> Result<(URLTemplate, GraphQLString<'schema>), Message> { + let str_value = GraphQLString::new(coordinate.node, &schema.sources).map_err(|_| Message { + code: Code::GraphQLError, + message: format!("The value for {coordinate} must be a string."), + locations: coordinate + .node + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + let template = URLTemplate::from_str(str_value.as_str()).map_err( + |string_template::Error { message, location }| Message { + code: Code::InvalidUrl, + message: format!("In {coordinate}: {message}"), + locations: str_value + .line_col_for_subslice(location, schema) + .into_iter() + .collect(), + }, + )?; + Ok((template, str_value)) +} + +pub(crate) fn validate_base_url( + url: &Url, + coordinate: impl Display, + value: &Node, + str_value: GraphQLString, + schema: &SchemaInfo, +) -> Result<(), Message> { + let scheme = url.scheme(); + if scheme != "http" && scheme != "https" { + let scheme_location = 0..scheme.len(); + Err(Message { + code: Code::InvalidUrlScheme, + message: format!( + "The value {value} for {coordinate} must be http or https, got {scheme}.", + ), + locations: str_value + .line_col_for_subslice(scheme_location, schema) + .into_iter() + .collect(), 
+ }) + } else { + Ok(()) + } +} diff --git a/apollo-federation/src/sources/connect/validation/mod.rs b/apollo-federation/src/sources/connect/validation/mod.rs new file mode 100644 index 0000000000..b11eef6ce0 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/mod.rs @@ -0,0 +1,646 @@ +//! Validation of the `@source` and `@connect` directives. + +// No panics allowed in this module +#![cfg_attr( + not(test), + deny( + clippy::exit, + clippy::panic, + clippy::unwrap_used, + clippy::expect_used, + clippy::indexing_slicing, + clippy::unimplemented, + clippy::todo + ) +)] + +mod coordinates; +mod entity; +mod expression; +mod extended_type; +mod graphql; +mod http; +mod selection; +mod source_name; +mod variable; + +use std::collections::HashMap; +use std::fmt::Display; +use std::ops::Range; + +use apollo_compiler::ast::OperationType; +use apollo_compiler::ast::Value; +use apollo_compiler::collections::IndexSet; +use apollo_compiler::executable::FieldSet; +use apollo_compiler::name; +use apollo_compiler::parser::LineColumn; +use apollo_compiler::parser::Parser; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::Directive; +use apollo_compiler::schema::ExtendedType; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::validation::Valid; +use apollo_compiler::Name; +use apollo_compiler::Node; +use apollo_compiler::Schema; +use coordinates::source_http_argument_coordinate; +use entity::field_set_error; +use entity::EntityKeyChecker; +use extended_type::validate_extended_type; +use itertools::Itertools; +use source_name::SourceName; +use strum_macros::Display; +use strum_macros::IntoStaticStr; +use url::Url; + +use super::Connector; +use crate::link::federation_spec_definition::FEDERATION_FIELDS_ARGUMENT_NAME; +use crate::link::federation_spec_definition::FEDERATION_KEY_DIRECTIVE_NAME_IN_SPEC; +use crate::link::federation_spec_definition::FEDERATION_RESOLVABLE_ARGUMENT_NAME; +use crate::link::spec::Identity; +use 
crate::link::Import; +use crate::link::Link; +use crate::sources::connect::spec::schema::HTTP_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::SOURCE_BASE_URL_ARGUMENT_NAME; +use crate::sources::connect::spec::schema::SOURCE_DIRECTIVE_NAME_IN_SPEC; +use crate::sources::connect::spec::schema::SOURCE_NAME_ARGUMENT_NAME; +use crate::sources::connect::validation::coordinates::BaseUrlCoordinate; +use crate::sources::connect::validation::coordinates::HttpHeadersCoordinate; +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::http::headers; +use crate::sources::connect::ConnectSpec; +use crate::subgraph::spec::CONTEXT_DIRECTIVE_NAME; +use crate::subgraph::spec::EXTERNAL_DIRECTIVE_NAME; +use crate::subgraph::spec::FROM_CONTEXT_DIRECTIVE_NAME; + +// The result of a validation pass on a subgraph +#[derive(Debug)] +pub struct ValidationResult { + /// All validation errors encountered. + pub errors: Vec, + + /// Whether or not the validated subgraph contained connector directives + pub has_connectors: bool, + + /// The parsed (and potentially invalid) schema of the subgraph + pub schema: Schema, +} + +/// Validate the connectors-related directives `@source` and `@connect`. +/// +/// This function attempts to collect as many validation errors as possible, so it does not bail +/// out as soon as it encounters one. 
+pub fn validate(source_text: &str, file_name: &str) -> ValidationResult { + // TODO: Use parse_and_validate (adding in directives as needed) + // TODO: Handle schema errors rather than relying on JavaScript to catch it later + let schema = Schema::parse(source_text, file_name) + .unwrap_or_else(|schema_with_errors| schema_with_errors.partial); + let connect_identity = ConnectSpec::identity(); + let Some((link, link_directive)) = Link::for_identity(&schema, &connect_identity) else { + // There are no connectors-related directives to validate + return ValidationResult { + errors: Vec::new(), + has_connectors: false, + schema, + }; + }; + + let federation = Link::for_identity(&schema, &Identity::federation_identity()); + let external_directive_name = federation + .map(|(link, _)| link.directive_name_in_schema(&EXTERNAL_DIRECTIVE_NAME)) + .unwrap_or(EXTERNAL_DIRECTIVE_NAME.clone()); + + let mut messages = check_conflicting_directives(&schema); + + let source_directive_name = ConnectSpec::source_directive_name(&link); + let connect_directive_name = ConnectSpec::connect_directive_name(&link); + let schema_info = SchemaInfo::new( + &schema, + source_text, + &connect_directive_name, + &source_directive_name, + ); + + let source_directives: Vec = schema + .schema_definition + .directives + .iter() + .filter(|directive| directive.name == source_directive_name) + .map(|directive| validate_source(directive, &schema_info)) + .collect(); + + let mut valid_source_names = HashMap::new(); + let all_source_names = source_directives + .iter() + .map(|directive| directive.name.clone()) + .collect_vec(); + for directive in source_directives { + messages.extend(directive.errors); + match directive.name.into_value_or_error(&schema_info.sources) { + Err(error) => messages.push(error), + Ok(name) => valid_source_names + .entry(name) + .or_insert_with(Vec::new) + .extend( + directive + .directive + .node + .line_column_range(&schema_info.sources), + ), + } + } + for (name, locations) in 
valid_source_names { + if locations.len() > 1 { + messages.push(Message { + message: format!("Every `@{source_directive_name}({SOURCE_NAME_ARGUMENT_NAME}:)` must be unique. Found duplicate name {name}."), + code: Code::DuplicateSourceName, + locations, + }); + } + } + + let mut seen_fields = IndexSet::default(); + + let connect_errors = schema.types.values().flat_map(|extended_type| { + validate_extended_type( + extended_type, + &schema_info, + &all_source_names, + &mut seen_fields, + ) + }); + messages.extend(connect_errors); + + if source_directive_name == DEFAULT_SOURCE_DIRECTIVE_NAME + && messages + .iter() + .any(|error| error.code == Code::NoSourcesDefined) + { + messages.push(Message { + code: Code::NoSourceImport, + message: format!("The `@{SOURCE_DIRECTIVE_NAME_IN_SPEC}` directive is not imported. Try adding `@{SOURCE_DIRECTIVE_NAME_IN_SPEC}` to `import` for `@{link_name}(url: \"{connect_identity}\")`", link_name=link_directive.name), + locations: link_directive.line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + + if should_do_advanced_validations(&messages) { + messages.extend(check_seen_fields( + &schema_info, + &seen_fields, + &external_directive_name, + )); + + match advanced_validations(&schema, file_name, ConnectSpec::V0_1) { + Ok(multiple) => messages.extend(multiple), + Err(Some(single)) => messages.push(single), + _ => {} // let the rest of composition handle this + }; + } + + ValidationResult { + errors: messages, + has_connectors: true, + schema, + } +} + +fn advanced_validations( + schema: &Schema, + subgraph_name: &str, + spec: ConnectSpec, +) -> Result, Option> { + let mut messages = vec![]; + + let connectors = Connector::from_schema(schema, subgraph_name, spec).map_err(|_| None)?; + + let mut entity_checker = EntityKeyChecker::default(); + + for (field_set, directive) in find_all_resolvable_keys(schema) { + entity_checker.add_key(&field_set, directive); + } + + for (_, connector) in connectors { + if let 
Some(field_set) = connector.resolvable_key(schema).map_err(|_| { + let variables = connector.variable_references().collect_vec(); + field_set_error(&variables, connector.id.directive.field.type_name()) + })? { + entity_checker.add_connector(field_set); + } + } + + messages.extend(entity_checker.check_for_missing_entity_connectors(schema)); + + Ok(messages) +} + +/// We'll avoid doing this work if there are bigger issues with the schema. +/// Otherwise we might emit a large number of diagnostics that will +/// distract from the main problems. +fn should_do_advanced_validations(messages: &[Message]) -> bool { + messages.is_empty() +} + +/// Check that all fields defined in the schema are resolved by a connector. +fn check_seen_fields( + schema: &SchemaInfo, + seen_fields: &IndexSet<(Name, Name)>, + external_directive_name: &Name, +) -> Vec { + let all_fields: IndexSet<_> = schema + .types + .values() + .filter_map(|extended_type| { + if extended_type.is_built_in() { + return None; + } + // ignore root fields, we have different validations for them + if schema.root_operation(OperationType::Query) == Some(extended_type.name()) + || schema.root_operation(OperationType::Mutation) == Some(extended_type.name()) + || schema.root_operation(OperationType::Subscription) == Some(extended_type.name()) + { + return None; + } + let coord = |(name, _): (&Name, _)| (extended_type.name().clone(), name.clone()); + + // ignore all fields on objects marked @external + if extended_type + .directives() + .iter() + .any(|dir| &dir.name == external_directive_name) + { + return None; + } + + match extended_type { + ExtendedType::Object(object) => { + // ignore fields marked @external + Some( + object + .fields + .iter() + .filter(|(_, def)| { + !def.directives + .iter() + .any(|dir| &dir.name == external_directive_name) + }) + .map(coord), + ) + } + ExtendedType::Interface(_) => None, // TODO: when interfaces are supported (probably should include fields from implementing/member types as 
well) + _ => None, + } + }) + .flatten() + .collect(); + + (&all_fields - seen_fields).iter().map(move |(parent_type, field_name)| { + let Ok(field_def) = schema.type_field(parent_type, field_name) else { + // This should never happen, but if it does, we don't want to panic + return Message { + code: Code::GraphQLError, + message: format!( + "Field `{parent_type}.{field_name}` is missing from the schema.", + ), + locations: Vec::new(), + }; + }; + Message { + code: Code::ConnectorsUnresolvedField, + message: format!( + "No connector resolves field `{parent_type}.{field_name}`. It must have a `@{connect_directive_name}` directive or appear in `@{connect_directive_name}(selection:)`.", + connect_directive_name = schema.connect_directive_name + ), + locations: field_def.line_column_range(&schema.sources).into_iter().collect(), + } + }).collect() +} + +fn check_conflicting_directives(schema: &Schema) -> Vec { + let Some((fed_link, fed_link_directive)) = + Link::for_identity(schema, &Identity::federation_identity()) + else { + return Vec::new(); + }; + + // TODO: make the `Link` code retain locations directly instead of reparsing stuff for validation + let imports = fed_link_directive + .specified_argument_by_name(&name!("import")) + .and_then(|arg| arg.as_list()) + .into_iter() + .flatten() + .filter_map(|value| Import::from_value(value).ok().map(|import| (value, import))) + .collect_vec(); + + let disallowed_imports = [CONTEXT_DIRECTIVE_NAME, FROM_CONTEXT_DIRECTIVE_NAME]; + fed_link + .imports + .into_iter() + .filter_map(|import| { + disallowed_imports + .contains(&import.element) + .then(|| Message { + code: Code::ConnectorsUnsupportedFederationDirective, + message: format!( + "The directive `@{import}` is not supported when using connectors.", + import = import.alias.as_ref().unwrap_or(&import.element) + ), + locations: imports + .iter() + .find_map(|(value, reparsed)| { + (*reparsed == *import).then(|| value.line_column_range(&schema.sources)) + }) + .flatten() + 
.into_iter() + .collect(), + }) + }) + .collect() +} + +const DEFAULT_SOURCE_DIRECTIVE_NAME: &str = "connect__source"; +#[allow(unused)] +const DEFAULT_CONNECT_DIRECTIVE_NAME: &str = "connect__connect"; + +fn validate_source(directive: &Component, schema: &SchemaInfo) -> SourceDirective { + let name = SourceName::from_directive(directive); + let mut errors = Vec::new(); + + if let Some(http_arg) = directive + .specified_argument_by_name(&HTTP_ARGUMENT_NAME) + .and_then(|arg| arg.as_object()) + { + // Validate URL argument + if let Some(url_value) = http_arg + .iter() + .find_map(|(key, value)| (key == &SOURCE_BASE_URL_ARGUMENT_NAME).then_some(value)) + { + if let Some(url_error) = parse_url( + url_value, + BaseUrlCoordinate { + source_directive_name: &directive.name, + }, + schema, + ) + .err() + { + errors.push(url_error); + } + } + + let expression_context = expression::Context::for_source(schema); + errors.extend(headers::validate_arg( + http_arg, + &expression_context, + HttpHeadersCoordinate::Source { + directive_name: &directive.name, + }, + )); + } else { + errors.push(Message { + code: Code::GraphQLError, + message: format!( + "{coordinate} must have a `{HTTP_ARGUMENT_NAME}` argument.", + coordinate = source_http_argument_coordinate(&directive.name), + ), + locations: directive + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }) + } + + SourceDirective { + name, + errors, + directive: directive.clone(), + } +} + +/// A `@source` directive along with any errors related to it. 
+struct SourceDirective { + name: SourceName, + errors: Vec, + directive: Component, +} + +fn parse_url( + value: &Node, + coordinate: Coordinate, + schema: &SchemaInfo, +) -> Result<(), Message> { + let str_value = GraphQLString::new(value, &schema.sources).map_err(|_| Message { + code: Code::GraphQLError, + message: format!("The value for {coordinate} must be a string."), + locations: value + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + let url = Url::parse(str_value.as_str()).map_err(|inner| Message { + code: Code::InvalidUrl, + message: format!("The value {value} for {coordinate} is not a valid URL: {inner}."), + locations: value + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + http::url::validate_base_url(&url, coordinate, value, str_value, schema) +} + +/// For an object type, get all the keys (and directive nodes) that are resolvable. +/// +/// The FieldSet returned here is what goes in the `fields` argument, so `id` in `@key(fields: "id")` +fn resolvable_key_fields<'a>( + object: &'a Node, + schema: &'a Schema, +) -> impl Iterator)> + 'a { + object + .directives + .iter() + .filter(|directive| directive.name == FEDERATION_KEY_DIRECTIVE_NAME_IN_SPEC) + .filter(|directive| { + directive + .arguments + .iter() + .find(|arg| arg.name == FEDERATION_RESOLVABLE_ARGUMENT_NAME) + .and_then(|arg| arg.value.to_bool()) + .unwrap_or(true) + }) + .filter_map(|directive| { + if let Some(fields_str) = directive + .arguments + .iter() + .find(|arg| arg.name == FEDERATION_FIELDS_ARGUMENT_NAME) + .map(|arg| &arg.value) + .and_then(|value| value.as_str()) + { + Parser::new() + .parse_field_set( + Valid::assume_valid_ref(schema), + object.name.clone(), + fields_str.to_string(), + "", + ) + .ok() + .map(|field_set| (field_set, directive)) + } else { + None + } + }) +} + +fn find_all_resolvable_keys(schema: &Schema) -> Vec<(FieldSet, &Component)> { + schema + .types + .values() + .flat_map(|extended_type| match extended_type 
{ + ExtendedType::Object(object) => Some(resolvable_key_fields(object, schema)), + _ => None, + }) + .flatten() + .collect() +} + +type DirectiveName = Name; + +#[derive(Debug, Clone)] +pub struct Message { + /// A unique, per-error code to allow consuming tools to take specific actions. These codes + /// should not change once stabilized. + pub code: Code, + /// A human-readable message describing the error. These messages are not stable, tools should + /// not rely on them remaining the same. + /// + /// # Formatting messages + /// 1. Messages should be complete sentences, starting with capitalization as appropriate and + /// ending with punctuation. + /// 2. When referring to elements of the schema, use + /// [schema coordinates](https://github.com/graphql/graphql-wg/blob/main/rfcs/SchemaCoordinates.md) + /// with any additional information added as required for clarity (e.g., the value of an arg). + /// 3. When referring to code elements (including schema coordinates), surround them with + /// backticks. This clarifies that `Type.field` is not ending a sentence with its period. + pub message: String, + pub locations: Vec>, +} + +/// The error code that will be shown to users when a validation fails during composition. +/// +/// Note that these codes are global, not scoped to connectors, so they should attempt to be +/// unique across all pieces of composition, including JavaScript components. +#[derive(Clone, Copy, Debug, Display, Eq, IntoStaticStr, PartialEq)] +#[strum(serialize_all = "SCREAMING_SNAKE_CASE")] +pub enum Code { + /// A problem with GraphQL syntax or semantics was found. These will usually be caught before + /// this validation process. 
+ GraphQLError, + DuplicateSourceName, + InvalidSourceName, + EmptySourceName, + /// A provided URL was not valid + InvalidUrl, + /// A URL scheme is not `http` or `https` + InvalidUrlScheme, + SourceNameMismatch, + SubscriptionInConnectors, + /// Query field is missing the `@connect` directive + QueryFieldMissingConnect, + /// Mutation field is missing the `@connect` directive + MutationFieldMissingConnect, + /// The `@connect` is using a `source`, but the URL is absolute. This is trouble because + /// the `@source` URL will be joined with the `@connect` URL, so the `@connect` URL should + /// actually be a path only. + AbsoluteConnectUrlWithSource, + /// The `@connect` directive is using a relative URL (path only) but does not define a `source`. + /// This is just a specialization of [`Self::InvalidUrl`] that provides a better suggestion for + /// the user. + RelativeConnectUrlWithoutSource, + /// This is a specialization of [`Self::SourceNameMismatch`] that provides a better suggestion. + NoSourcesDefined, + /// The subgraph doesn't import the `@source` directive. This isn't necessarily a problem, but + /// is likely a mistake. + NoSourceImport, + /// The `@connect` directive has multiple HTTP methods when only one is allowed. + MultipleHttpMethods, + /// The `@connect` directive is missing an HTTP method. + MissingHttpMethod, + /// The `entity` argument should only be used on the root `Query` field. + EntityNotOnRootQuery, + /// The arguments to the entity reference resolver do not match the entity type. + EntityResolverArgumentMismatch, + /// The `entity` argument should only be used with non-list, nullable, object types. + EntityTypeInvalid, + /// A @key is defined without a corresponding entity connector.
+ MissingEntityConnector, + /// A syntax error in `selection` + InvalidJsonSelection, + /// A cycle was detected within a `selection` + CircularReference, + /// A field was selected but is not defined on the type + SelectedFieldNotFound, + /// A group selection (`a { b }`) was used, but the field is not an object + GroupSelectionIsNotObject, + /// The `name` mapping must be unique for all headers. + HttpHeaderNameCollision, + /// A provided header in `@source` or `@connect` was not valid + InvalidHeader, + /// Certain directives are not allowed when using connectors + ConnectorsUnsupportedFederationDirective, + /// Abstract types are not allowed when using connectors + ConnectorsUnsupportedAbstractType, + /// Fields that return an object type must use a group JSONSelection `{}` + GroupSelectionRequiredForObject, + /// Fields in the schema that aren't resolved by a connector + ConnectorsUnresolvedField, + /// A field resolved by a connector has arguments defined + ConnectorsFieldWithArguments, + /// Part of the `@connect` refers to an `$args` which is not defined + UndefinedArgument, + /// Part of the `@connect` refers to a `$this` which is not defined + UndefinedField, + /// A type used in a variable is not yet supported (i.e., unions) + UnsupportedVariableType, + /// A variable is nullable in a location which requires non-null at runtime + NullabilityMismatch, +} + +impl Code { + pub const fn severity(&self) -> Severity { + match self { + Self::NoSourceImport | Self::NullabilityMismatch => Severity::Warning, + _ => Severity::Error, + } + } +} + +/// Given the [`Code`] of a [`Message`], how important is that message? +#[derive(Clone, Copy, Debug, Eq, PartialEq)] +pub enum Severity { + /// This is an error, validation has failed. + Error, + /// The user probably wants to know about this, but it doesn't halt composition.
+ Warning, +} + +#[cfg(test)] +mod test_validate_source { + use std::fs::read_to_string; + + use insta::assert_snapshot; + use insta::glob; + + use super::*; + + #[test] + fn validation_tests() { + insta::with_settings!({prepend_module_to_snapshot => false}, { + glob!("test_data", "**/*.graphql", |path| { + let schema = read_to_string(path).unwrap(); + let result = validate(&schema, path.to_str().unwrap()); + assert_snapshot!(format!("{:#?}", result.errors)); + }); + }); + } +} diff --git a/apollo-federation/src/sources/connect/validation/selection.rs b/apollo-federation/src/sources/connect/validation/selection.rs new file mode 100644 index 0000000000..5dac0a7011 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/selection.rs @@ -0,0 +1,484 @@ +use std::fmt::Display; +use std::iter::once; +use std::ops::Range; + +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::collections::IndexSet; +use apollo_compiler::parser::LineColumn; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::Directive; +use apollo_compiler::schema::ExtendedType; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Node; +use itertools::Itertools; + +use super::coordinates::ConnectDirectiveCoordinate; +use super::coordinates::SelectionCoordinate; +use super::Code; +use super::Message; +use super::Name; +use super::Value; +use crate::sources::connect::expand::visitors::FieldVisitor; +use crate::sources::connect::expand::visitors::GroupVisitor; +use crate::sources::connect::json_selection::ExternalVarPaths; +use crate::sources::connect::json_selection::NamedSelection; +use crate::sources::connect::json_selection::Ranged; +use crate::sources::connect::spec::schema::CONNECT_SELECTION_ARGUMENT_NAME; +use crate::sources::connect::validation::coordinates::connect_directive_http_body_coordinate; +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use 
crate::sources::connect::validation::variable::VariableResolver; +use crate::sources::connect::variable::Phase; +use crate::sources::connect::variable::Target; +use crate::sources::connect::variable::VariableContext; +use crate::sources::connect::JSONSelection; +use crate::sources::connect::SubSelection; + +pub(super) fn validate_selection( + coordinate: ConnectDirectiveCoordinate, + schema: &SchemaInfo, + seen_fields: &mut IndexSet<(Name, Name)>, +) -> Result<(), Message> { + let (selection_arg, json_selection) = get_json_selection(coordinate, schema)?; + + validate_selection_variables( + &VariableResolver::new( + VariableContext::new( + coordinate.field_coordinate.object, + coordinate.field_coordinate.field, + Phase::Response, + Target::Body, + ), + schema, + ), + selection_arg.coordinate, + &json_selection, + selection_arg.value, + )?; + + let field = coordinate.field_coordinate.field; + + let Some(return_type) = schema.get_object(field.ty.inner_named_type()) else { + // TODO: Validate scalar return types + return Ok(()); + }; + let Some(sub_selection) = json_selection.next_subselection() else { + // TODO: Validate scalar selections + return Ok(()); + }; + + let group = Group { + selection: sub_selection, + ty: return_type, + definition: field, + }; + + SelectionValidator { + root: PathPart::Root(coordinate.field_coordinate.object), + schema, + path: Vec::new(), + selection_arg, + seen_fields, + } + .walk(group) +} + +pub(super) fn validate_body_selection( + connect_directive: &Node, + connect_coordinate: ConnectDirectiveCoordinate, + parent_type: &Node, + field: &Component, + schema: &SchemaInfo, + selection_node: &Node, +) -> Result<(), Message> { + let coordinate = + connect_directive_http_body_coordinate(&connect_directive.name, parent_type, &field.name); + + let selection_str = + GraphQLString::new(selection_node, &schema.sources).map_err(|_| Message { + code: Code::GraphQLError, + message: format!("{coordinate} must be a string."), + locations: 
selection_node + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + + let selection = JSONSelection::parse(selection_str.as_str()).map_err(|err| Message { + code: Code::InvalidJsonSelection, + message: format!("{coordinate} is not a valid JSONSelection: {err}"), + locations: selection_node + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + + if selection.is_empty() { + return Err(Message { + code: Code::InvalidJsonSelection, + message: format!("{coordinate} is empty"), + locations: selection_node + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + + validate_selection_variables( + &VariableResolver::new( + VariableContext::new( + connect_coordinate.field_coordinate.object, + connect_coordinate.field_coordinate.field, + Phase::Request, + Target::Body, + ), + schema, + ), + coordinate, + &selection, + selection_str, + ) +} + +/// Validate variable references in a JSON Selection +pub(super) fn validate_selection_variables( + variable_resolver: &VariableResolver, + coordinate: impl Display, + selection: &JSONSelection, + selection_str: GraphQLString, +) -> Result<(), Message> { + for reference in selection + .external_var_paths() + .into_iter() + .flat_map(|var_path| var_path.variable_reference()) + { + variable_resolver + .resolve(&reference, selection_str) + .map_err(|mut err| { + err.message = format!("In {coordinate}: {message}", message = err.message); + err + })?; + } + Ok(()) +} + +fn get_json_selection<'a>( + connect_directive: ConnectDirectiveCoordinate<'a>, + schema: &'a SchemaInfo<'a>, +) -> Result<(SelectionArg<'a>, JSONSelection), Message> { + let coordinate = SelectionCoordinate::from(connect_directive); + let selection_arg = connect_directive + .directive + .arguments + .iter() + .find(|arg| arg.name == CONNECT_SELECTION_ARGUMENT_NAME) + .ok_or_else(|| Message { + code: Code::GraphQLError, + message: format!("{coordinate} is required."), + locations: connect_directive + .directive 
+ .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + let selection_str = + GraphQLString::new(&selection_arg.value, &schema.sources).map_err(|_| Message { + code: Code::GraphQLError, + message: format!("{coordinate} must be a string."), + locations: selection_arg + .line_column_range(&schema.sources) + .into_iter() + .collect(), + })?; + + let selection = JSONSelection::parse(selection_str.as_str()).map_err(|err| Message { + code: Code::InvalidJsonSelection, + message: format!("{coordinate} is not a valid JSONSelection: {err}",), + locations: selection_str + .line_col_for_subslice(err.offset..err.offset + 1, schema) + .into_iter() + .collect(), + })?; + + if selection.is_empty() { + return Err(Message { + code: Code::InvalidJsonSelection, + message: format!("{coordinate} is empty",), + locations: selection_arg + .value + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + + Ok(( + SelectionArg { + value: selection_str, + coordinate, + }, + selection, + )) +} + +struct SelectionArg<'schema> { + value: GraphQLString<'schema>, + coordinate: SelectionCoordinate<'schema>, +} + +struct SelectionValidator<'schema, 'a> { + schema: &'schema SchemaInfo<'schema>, + root: PathPart<'schema>, + path: Vec>, + selection_arg: SelectionArg<'schema>, + seen_fields: &'a mut IndexSet<(Name, Name)>, +} + +impl SelectionValidator<'_, '_> { + fn check_for_circular_reference( + &self, + field: Field, + object: &Node, + ) -> Result<(), Message> { + for (depth, seen_part) in self.path_with_root().enumerate() { + let (seen_type, ancestor_field) = match seen_part { + PathPart::Root(root) => (root, None), + PathPart::Field { ty, definition } => (ty, Some(definition)), + }; + + if seen_type == object { + return Err(Message { + code: Code::CircularReference, + message: format!( + "Circular reference detected in {coordinate}: type `{new_object_name}` appears more than once in `{selection_path}`. 
For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + coordinate = &self.selection_arg.coordinate, + selection_path = self.path_string(field.definition), + new_object_name = object.name, + ), + // TODO: make a helper function for easier range collection + locations: self.get_range_location(field.inner_range()) + // Skip over fields which duplicate the location of the selection + .chain(if depth > 1 {ancestor_field.and_then(|def| def.line_column_range(&self.schema.sources))} else {None}) + .chain(field.definition.line_column_range(&self.schema.sources)) + .collect(), + }); + } + } + Ok(()) + } + + fn get_selection_location( + &self, + selection: &impl Ranged, + ) -> impl Iterator> { + selection + .range() + .and_then(|range| { + self.selection_arg + .value + .line_col_for_subslice(range, self.schema) + }) + .into_iter() + } + + fn get_range_location( + &self, + selection: Option>, + ) -> impl Iterator> { + selection + .as_ref() + .and_then(|range| { + self.selection_arg + .value + .line_col_for_subslice(range.clone(), self.schema) + }) + .into_iter() + } +} + +#[derive(Clone, Copy, Debug)] +struct Field<'schema> { + selection: &'schema NamedSelection, + definition: &'schema Node, +} + +impl<'schema> Field<'schema> { + fn next_subselection(&self) -> Option<&'schema SubSelection> { + self.selection.next_subselection() + } + + fn inner_range(&self) -> Option> { + self.selection.range() + } +} + +#[derive(Clone, Copy, Debug)] +enum PathPart<'a> { + // Query, Mutation, Subscription OR an Entity type + Root(&'a Node), + Field { + definition: &'a Node, + ty: &'a Node, + }, +} + +impl PathPart<'_> { + fn ty(&self) -> &Node { + match self { + PathPart::Root(ty) => ty, + PathPart::Field { ty, .. 
} => ty, + } + } +} + +#[derive(Clone, Debug)] +struct Group<'schema> { + selection: &'schema SubSelection, + ty: &'schema Node, + definition: &'schema Node, +} + +// TODO: Once there is location data for JSONSelection, return multiple errors instead of stopping +// at the first +impl<'schema> GroupVisitor, Field<'schema>> for SelectionValidator<'schema, '_> { + /// If both the selection and the schema agree that this field is an object, then we + /// provide it back to the visitor to be walked. + /// + /// This does no validation, as we have to do that on the field level anyway. + fn try_get_group_for_field( + &self, + field: &Field<'schema>, + ) -> Result>, Self::Error> { + let Some(selection) = field.next_subselection() else { + return Ok(None); + }; + let Some(ty) = self + .schema + .get_object(field.definition.ty.inner_named_type()) + else { + return Ok(None); + }; + Ok(Some(Group { + selection, + ty, + definition: field.definition, + })) + } + + /// Get all the fields for an object type / selection. + /// Returns an error if a selection points at a field which does not exist on the schema.
+ fn enter_group(&mut self, group: &Group<'schema>) -> Result>, Self::Error> { + self.path.push(PathPart::Field { + definition: group.definition, + ty: group.ty, + }); + group.selection.selections_iter().flat_map(|selection| { + let mut results = Vec::new(); + for field_name in selection.names() { + if let Some(definition) = group.ty.fields.get(field_name) { + results.push(Ok(Field { + selection, + definition, + })); + } else { + results.push(Err(Message { + code: Code::SelectedFieldNotFound, + message: format!( + "{coordinate} contains field `{field_name}`, which does not exist on `{parent_type}`.", + coordinate = &self.selection_arg.coordinate, + parent_type = group.ty.name, + ), + locations: self.get_selection_location(selection).collect(), + })); + } + } + results + }).collect() + } + + fn exit_group(&mut self) -> Result<(), Self::Error> { + self.path.pop(); + Ok(()) + } +} + +impl<'schema> FieldVisitor> for SelectionValidator<'schema, '_> { + type Error = Message; + + fn visit(&mut self, field: Field<'schema>) -> Result<(), Self::Error> { + let field_name = field.definition.name.as_str(); + let type_name = field.definition.ty.inner_named_type(); + let coordinate = self.selection_arg.coordinate; + let field_type = self.schema.types.get(type_name).ok_or_else(|| Message { + code: Code::GraphQLError, + message: format!( + "{coordinate} contains field `{field_name}`, which has undefined type `{type_name}.", + ), + locations: self.get_range_location(field.inner_range()).collect(), + })?; + let is_group = field.next_subselection().is_some(); + + self.seen_fields.insert(( + self.last_field().ty().name.clone(), + field.definition.name.clone(), + )); + + if !field.definition.arguments.is_empty() { + return Err(Message { + code: Code::ConnectorsFieldWithArguments, + message: format!( + "{coordinate} selects field `{parent_type}.{field_name}`, which has arguments. 
Only fields with a connector can have arguments.", + parent_type = self.last_field().ty().name, + ), + locations: self.get_range_location(field.inner_range()).chain(field.definition.line_column_range(&self.schema.sources)).collect(), + }); + } + + match (field_type, is_group) { + (ExtendedType::Object(object), true) => { + self.check_for_circular_reference(field, object) + }, + (_, true) => { + Err(Message { + code: Code::GroupSelectionIsNotObject, + message: format!( + "{coordinate} selects a group `{field_name}{{}}`, but `{parent_type}.{field_name}` is of type `{type_name}` which is not an object.", + parent_type = self.last_field().ty().name, + ), + locations: self.get_range_location(field.inner_range()).chain(field.definition.line_column_range(&self.schema.sources)).collect(), + }) + }, + (ExtendedType::Object(_), false) => { + Err(Message { + code: Code::GroupSelectionRequiredForObject, + message: format!( + "`{parent_type}.{field_name}` is an object, so {coordinate} must select a group `{field_name}{{}}`.", + parent_type = self.last_field().ty().name, + ), + locations: self.get_range_location(field.inner_range()).chain(field.definition.line_column_range(&self.schema.sources)).collect(), + }) + }, + (_, false) => Ok(()), + } + } +} + +impl SelectionValidator<'_, '_> { + fn path_with_root(&self) -> impl Iterator { + once(self.root).chain(self.path.iter().copied()) + } + + fn path_string(&self, tail: &FieldDefinition) -> String { + self.path_with_root() + .map(|part| match part { + PathPart::Root(ty) => ty.name.as_str(), + PathPart::Field { definition, .. 
} => definition.name.as_str(), + }) + .chain(once(tail.name.as_str())) + .join(".") + } + + fn last_field(&self) -> &PathPart { + self.path.last().unwrap_or(&self.root) + } +} diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@all_fields_selected.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@all_fields_selected.graphql.snap new file mode 100644 index 0000000000..9641665ff9 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@all_fields_selected.graphql.snap @@ -0,0 +1,42 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", result.errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/all_fields_selected.graphql +--- +[ + Message { + code: ConnectorsUnresolvedField, + message: "No connector resolves field `T.unselected`. It must have a `@connect` directive or appear in `@connect(selection:)`.", + locations: [ + 34:3..34:22, + ], + }, + Message { + code: ConnectorsUnresolvedField, + message: "No connector resolves field `T.secondUnused`. It must have a `@connect` directive or appear in `@connect(selection:)`.", + locations: [ + 49:3..49:23, + ], + }, + Message { + code: ConnectorsUnresolvedField, + message: "No connector resolves field `C.unselected`. It must have a `@connect` directive or appear in `@connect(selection:)`.", + locations: [ + 55:3..55:21, + ], + }, + Message { + code: ConnectorsUnresolvedField, + message: "No connector resolves field `D.unselected`. It must have a `@connect` directive or appear in `@connect(selection:)`.", + locations: [ + 60:3..60:21, + ], + }, + Message { + code: ConnectorsUnresolvedField, + message: "No connector resolves field `Unused.unselected`. 
It must have a `@connect` directive or appear in `@connect(selection:)`.", + locations: [ + 64:3..64:18, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference.graphql.snap new file mode 100644 index 0000000000..2438dd4986 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/circular_reference.graphql +--- +[ + Message { + code: CircularReference, + message: "Circular reference detected in `@connect(selection:)` on `Query.me`: type `User` appears more than once in `Query.me.friends`. For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + locations: [ + 9:65..9:77, + 14:5..14:22, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_2.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_2.graphql.snap new file mode 100644 index 0000000000..dfcac23166 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_2.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/circular_reference_2.graphql +--- +[ + Message { + code: CircularReference, + message: "Circular reference detected in `@connect(selection:)` on `Track.modules`: type `Track` appears more than once in `Track.modules.track`. 
For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + locations: [ + 18:28..18:50, + 25:3..25:15, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_3.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_3.graphql.snap new file mode 100644 index 0000000000..323db0c08e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@circular_reference_3.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/circular_reference_3.graphql +--- +[ + Message { + code: CircularReference, + message: "Direct circular reference detected in `User.friends: [User!]!`. For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + locations: [ + 11:3..15:6, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_name_mismatch.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_name_mismatch.graphql.snap new file mode 100644 index 0000000000..24a72c59bc --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_name_mismatch.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/connect_source_name_mismatch.graphql +--- +[ + Message { + code: SourceNameMismatch, + message: "`@connect(source: \"v1\")` on `Query.resources` does not match any defined sources. 
Did you mean `@source(name: \"v2\")`?", + locations: [ + 10:14..10:26, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_undefined.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_undefined.graphql.snap new file mode 100644 index 0000000000..c60efbd89b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@connect_source_undefined.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/connect_source_undefined.graphql +--- +[ + Message { + code: NoSourcesDefined, + message: "`@connect(source: \"v1\")` on `Query.resources` specifies a source, but none are defined. Try adding `@source(name: \"v1\")` to the schema.", + locations: [ + 9:14..9:26, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars.graphql.snap new file mode 100644 index 0000000000..2dc3c88d0a --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/denest_scalars.graphql +--- +[ + Message { + code: GroupSelectionRequiredForObject, + message: "`User.street` is an object, so `@connect(selection:)` on `Query.me` must select a group `street{}`.", + locations: [ + 15:9..15:15, + 25:3..25:17, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars2.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars2.graphql.snap new file mode 100644 index 0000000000..33bbc22731 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@denest_scalars2.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/denest_scalars2.graphql +--- +[ + Message { + code: GroupSelectionIsNotObject, + message: "`@connect(selection:)` on `Query.me` selects a group `street{}`, but `User.street` is of type `String` which is not an object.", + locations: [ + 15:9..15:31, + 25:3..25:17, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_abstract_types.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_abstract_types.graphql.snap new file mode 100644 index 0000000000..2eaf4a5994 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_abstract_types.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/disallowed_abstract_types.graphql +--- +[ + Message { + code: ConnectorsUnsupportedAbstractType, + message: "Abstract schema types, such as `interface`, are not supported when using connectors. You can check out our documentation at https://go.apollo.dev/connectors/best-practices#abstract-schema-types-are-unsupported.", + locations: [ + 21:1..21:18, + ], + }, + Message { + code: ConnectorsUnsupportedAbstractType, + message: "Abstract schema types, such as `union`, are not supported when using connectors. 
You can check out our documentation at https://go.apollo.dev/connectors/best-practices#abstract-schema-types-are-unsupported.", + locations: [ + 25:1..25:12, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_federation_imports.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_federation_imports.graphql.snap new file mode 100644 index 0000000000..f139453059 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@disallowed_federation_imports.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/disallowed_federation_imports.graphql +--- +[ + Message { + code: ConnectorsUnsupportedFederationDirective, + message: "The directive `@context` is not supported when using connectors.", + locations: [ + 6:7..6:17, + ], + }, + Message { + code: ConnectorsUnsupportedFederationDirective, + message: "The directive `@fromContext` is not supported when using connectors.", + locations: [ + 7:7..7:21, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@duplicate_source_name.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@duplicate_source_name.graphql.snap new file mode 100644 index 0000000000..bd3bc9d198 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@duplicate_source_name.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/duplicate_source_name.graphql +--- +[ + Message { + code: DuplicateSourceName, + message: "Every `@source(name:)` must be unique. 
Found duplicate name \"v1\".", + locations: [ + 6:3..6:61, + 7:3..7:61, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_selection.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_selection.graphql.snap new file mode 100644 index 0000000000..d46635f3e3 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_selection.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/empty_selection.graphql +--- +[ + Message { + code: InvalidJsonSelection, + message: "`@connect(selection:)` on `Query.resources` is empty", + locations: [ + 13:18..13:29, + ], + }, + Message { + code: InvalidJsonSelection, + message: "`@connect(http: {body:})` on `Query.resources` is empty", + locations: [ + 12:41..12:45, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_source_name.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_source_name.graphql.snap new file mode 100644 index 0000000000..11d5fa3826 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@empty_source_name.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/empty_source_name.graphql +--- +[ + Message { + code: EmptySourceName, + message: "The value for `@source(name:)` can't be empty.", + locations: [ + 6:17..6:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@fields_with_arguments.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@fields_with_arguments.graphql.snap new file mode 100644 index 0000000000..72df52187f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@fields_with_arguments.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/fields_with_arguments.graphql +--- +[ + Message { + code: ConnectorsFieldWithArguments, + message: "`@connect(selection:)` on `Query.ts` selects field `T.field`, which has arguments. Only fields with a connector can have arguments.", + locations: [ + 11:7..11:12, + 18:3..18:29, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@group_selection_on_scalar.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@group_selection_on_scalar.graphql.snap new file mode 100644 index 0000000000..8fe327722a --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@group_selection_on_scalar.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/group_selection_on_scalar.graphql +--- +[ + Message { + code: GroupSelectionIsNotObject, + message: "`@connect(selection:)` on `Query.me` selects a group `id{}`, but `User.id` is of type `ID` which is not an object.", + locations: [ + 8:78..8:87, + 12:5..12:12, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__expressions_that_evaluate_to_invalid_types.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__expressions_that_evaluate_to_invalid_types.graphql.snap new file 
mode 100644 index 0000000000..5d304bc47e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__expressions_that_evaluate_to_invalid_types.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/headers/expressions_that_evaluate_to_invalid_types.graphql +--- +[ + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)`: array values aren't valid here", + locations: [ + 11:42..11:53, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)`: object values aren't valid here", + locations: [ + 12:43..12:48, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)`: object values aren't valid here", + locations: [ + 13:73..13:73, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.blah`: array values aren't valid here", + locations: [ + 21:67..21:80, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_connect_http_headers.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_connect_http_headers.graphql.snap new file mode 100644 index 0000000000..b93674127b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_connect_http_headers.graphql.snap @@ -0,0 +1,79 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", result.errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_connect_http_headers.graphql +--- +[ + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` either `from` or `value` must be set", + locations: [ + 12:11..12:39, + ], + }, + Message { 
+ code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` missing `name` field", + locations: [ + 13:11..13:37, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` `from` and `value` can't be set at the same time", + locations: [ + 14:37..14:41, + 14:61..14:66, + ], + }, + Message { + code: HttpHeaderNameCollision, + message: "Duplicate header names are not allowed. The header name 'x-name-collision' at `@connect(http.headers:)` on `Query.resources` is already defined.", + locations: [ + 16:19..16:37, + 15:19..15:37, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` the value `` is an invalid HTTP header name", + locations: [ + 17:19..17:37, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` the value `` is an invalid HTTP header name", + locations: [ + 18:43..18:61, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` invalid value ` Value with 😊 emoji and newline \n `", + locations: [ + 21:21..21:58, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` header 'content-length' is reserved and cannot be set by a connector", + locations: [ + 23:19..23:35, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` header 'content-type' can't be set with `from`, only with `value`", + locations: [ + 24:19..24:33, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.resources` header 'accept' can't be set with `from`, only with `value`", + locations: [ + 25:19..25:27, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_namespace_in_header_variables.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_namespace_in_header_variables.graphql.snap new file mode 100644 index 0000000000..4168982f21 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_namespace_in_header_variables.graphql.snap @@ -0,0 +1,56 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/headers/invalid_namespace_in_header_variables.graphql +--- +[ + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` Unknown variable", + locations: [ + 11:49..11:57, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)`: $this is not valid here, must be one of $config, $context", + locations: [ + 12:62..12:71, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)`: `config.bar` must start with an argument name, like `$this` or `$args`", + locations: [ + 13:56..13:66, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.scalar` Unknown variable", + locations: [ + 24:49..24:57, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.scalar`: $status is not valid here, must be one of $args, $config, $context", + locations: [ + 25:62..25:69, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.scalar`: $this is not valid here, must be one of $args, $config, $context", + locations: [ + 26:47..26:52, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.scalar`: `config.bar` must start with an argument name, like `$this` or `$args`", + locations: [ + 27:56..27:66, + ], + }, +] diff --git 
a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_nested_paths_in_header_variables.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_nested_paths_in_header_variables.graphql.snap new file mode 100644 index 0000000000..5a6a58e648 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_nested_paths_in_header_variables.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/headers/invalid_nested_paths_in_header_variables.graphql +--- +[ + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.scalar`: `$args.scalar` doesn't have a field named `blah`", + locations: [ + 13:47..13:64, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.object`: `InputObject` doesn't have a field named `fieldThatDoesntExist`", + locations: [ + 23:47..23:79, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Query.enum`: `Enum` doesn't have a field named `cantHaveFields`", + locations: [ + 33:45..33:70, + ], + }, + Message { + code: InvalidHeader, + message: "In `@connect(http.headers:)` on `Object.newField`: `$this` doesn't have a field named `fieldThatDoesntExist`", + locations: [ + 47:47..47:73, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_source_http_headers.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_source_http_headers.graphql.snap new file mode 100644 index 0000000000..561ac3bbcb --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@headers__invalid_source_http_headers.graphql.snap 
@@ -0,0 +1,72 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_source_http_headers.graphql +--- +[ + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` either `from` or `value` must be set", + locations: [ + 13:9..13:37, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` missing `name` field", + locations: [ + 14:9..14:35, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` `from` and `value` can't be set at the same time", + locations: [ + 15:35..15:39, + 15:59..15:64, + ], + }, + Message { + code: HttpHeaderNameCollision, + message: "Duplicate header names are not allowed. The header name 'x-name-collision' at `@source(http.headers:)` is already defined.", + locations: [ + 17:17..17:35, + 16:17..16:35, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` the value `` is an invalid HTTP header name", + locations: [ + 18:17..18:35, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` the value `` is an invalid HTTP header name", + locations: [ + 19:41..19:59, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` invalid value ` Value with 😊 emoji and newline \n `", + locations: [ + 22:19..22:56, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` header 'content-length' is reserved and cannot be set by a connector", + locations: [ + 24:17..24:33, + ], + }, + Message { + code: InvalidHeader, + message: "In `@source(http.headers:)` header 'content-type' can't be set with `from`, only with `value`", + locations: [ + 25:17..25:31, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_chars_in_source_name.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_chars_in_source_name.graphql.snap new file mode 100644 index 0000000000..1e3e531b7e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_chars_in_source_name.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_chars_in_source_name.graphql +--- +[ + Message { + code: InvalidSourceName, + message: "`@source(name: \"u$ers\")` is invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{0,63}$", + locations: [ + 6:17..6:24, + ], + }, + Message { + code: InvalidSourceName, + message: "`@source(name: \"1\")` is invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{0,63}$", + locations: [ + 7:17..7:20, + ], + }, + Message { + code: InvalidSourceName, + message: "`@source(name: \"no.dots\")` is invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{0,63}$", + locations: [ + 8:17..8:26, + ], + }, + Message { + code: InvalidSourceName, + message: "`@source(name: \"areallylongnamethatisoversixtythreecharacterstakesalongwhiletotypebutthisshoulddoit\")` is invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{0,63}$", + locations: [ + 10:11..10:96, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_namespace_in_body_selection.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_namespace_in_body_selection.graphql.snap new file mode 100644 index 0000000000..cf30789ed7 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_namespace_in_body_selection.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: 
"format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_namespace_in_body_selection.graphql +--- +[ + Message { + code: InvalidJsonSelection, + message: "In `@connect(http: {body:})` on `Mutation.createUser`: variable `$status` is not valid at this location, must be one of $args, $config, $context, $this", + locations: [ + 21:17..21:24, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_nested_paths_in_json_selection.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_nested_paths_in_json_selection.graphql.snap new file mode 100644 index 0000000000..f032451623 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_nested_paths_in_json_selection.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_nested_paths_in_json_selection.graphql +--- +[ + Message { + code: UndefinedField, + message: "In `@connect(http: {body:})` on `Query.scalar`: `String` does not have a field named `blah`.", + locations: [ + 12:34..12:38, + ], + }, + Message { + code: UndefinedField, + message: "In `@connect(http: {body:})` on `Query.object`: `InputObject` does not have a field named `fieldThatDoesntExist`.", + locations: [ + 20:33..20:53, + ], + }, + Message { + code: UndefinedField, + message: "In `@connect(http: {body:})` on `Query.enum`: `Enum` does not have a field named `cantHaveFields`.", + locations: [ + 28:30..28:44, + ], + }, + Message { + code: UndefinedField, + message: "In `@connect(http: {body:})` on `Object.newField`: `Object` does not have a field named `fieldThatDoesntExist`", + locations: [ + 40:27..40:47, + ], + }, +] diff --git 
a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_selection_syntax.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_selection_syntax.graphql.snap new file mode 100644 index 0000000000..7eff4959c2 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@invalid_selection_syntax.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/invalid_selection_syntax.graphql +--- +[ + Message { + code: InvalidJsonSelection, + message: "`@connect(selection:)` on `Query.something` is not a valid JSONSelection: nom::error::ErrorKind::Eof: &how", + locations: [ + 8:87..8:88, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_is_object_but_field_is_not.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_is_object_but_field_is_not.graphql.snap new file mode 100644 index 0000000000..9656d60dce --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_is_object_but_field_is_not.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_is_object_but_field_is_not.graphql +--- +[ + Message { + code: EntityResolverArgumentMismatch, + message: "`Query.product` with `@connect(entity: true)` has invalid arguments. 
Mismatched type on field `id` - expected `ID` but found `ProductInput`.", + locations: [ + 6:11..6:28, + 10:7..10:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_type_doesnt_match_field_type.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_type_doesnt_match_field_type.graphql.snap new file mode 100644 index 0000000000..948bcd24b2 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__arg_type_doesnt_match_field_type.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_type_doesnt_match_field_type.graphql +--- +[ + Message { + code: EntityResolverArgumentMismatch, + message: "`Query.product` with `@connect(entity: true)` has invalid arguments. 
Mismatched type on field `id` - expected `ID` but found `String`.", + locations: [ + 6:11..6:22, + 10:7..10:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__composite_key_doesnt_match.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__composite_key_doesnt_match.graphql.snap new file mode 100644 index 0000000000..901a536c6d --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__composite_key_doesnt_match.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/composite_key_doesnt_match.graphql +--- +[ + Message { + code: MissingEntityConnector, + message: "Entity resolution for `@key(fields: \"id store { id country { key_id region } }\")` on `Product` is not implemented by a connector. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 17:14..17:71, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch.graphql.snap new file mode 100644 index 0000000000..868c2fc9da --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch.graphql +--- +[ + Message { + code: EntityResolverArgumentMismatch, + message: "`Query.product` with `@connect(entity: true)` has invalid arguments. 
Argument `id` does not have a matching field `id` on type `Product`.", + locations: [ + 6:11..6:18, + 10:7..10:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch_composite_key.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch_composite_key.graphql.snap new file mode 100644 index 0000000000..9ed27de6dc --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_arg_field_arg_name_mismatch_composite_key.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch_composite_key.graphql +--- +[ + Message { + code: EntityResolverArgumentMismatch, + message: "`Query.product` with `@connect(entity: true)` has invalid arguments. 
Field `id` on `CountryInput` does not have a matching field `id` on `Country`.", + locations: [ + 42:3..42:10, + 12:7..12:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_list_type.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_list_type.graphql.snap new file mode 100644 index 0000000000..069601c485 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_list_type.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_list_type.graphql +--- +[ + Message { + code: EntityTypeInvalid, + message: "`@connect(entity: true)` on `Query.users` is invalid. Entity connectors must return non-list, nullable, object types. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 8:7..8:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_non_root_field.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_non_root_field.graphql.snap new file mode 100644 index 0000000000..c9e84c0947 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_on_non_root_field.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_non_root_field.graphql +--- +[ + Message { + code: EntityNotOnRootQuery, + message: "`@connect(entity: true)` on `User.favoriteColor` is invalid. 
Entity resolvers can only be declared on root `Query` fields.", + locations: [ + 19:7..19:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_non_null_type.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_non_null_type.graphql.snap new file mode 100644 index 0000000000..85c61f8b4f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_non_null_type.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_non_null_type.graphql +--- +[ + Message { + code: EntityTypeInvalid, + message: "`@connect(entity: true)` on `Query.user` is invalid. Entity connectors must return non-list, nullable, object types. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 8:7..8:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_scalar.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_scalar.graphql.snap new file mode 100644 index 0000000000..3e28eaf331 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__entity_true_returning_scalar.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_scalar.graphql +--- +[ + Message { + code: EntityTypeInvalid, + message: "`@connect(entity: true)` on `Query.name` is invalid. 
Entity connectors must return object types.", + locations: [ + 8:7..8:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__multiple_keys_not_all_resolved.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__multiple_keys_not_all_resolved.graphql.snap new file mode 100644 index 0000000000..d29dab9015 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__multiple_keys_not_all_resolved.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/multiple_keys_not_all_resolved.graphql +--- +[ + Message { + code: MissingEntityConnector, + message: "Entity resolution for `@key(fields: \"id store { id country { key_id region } }\")` on `Product` is not implemented by a connector. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 19:3..19:60, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__no_args_for_entity_true.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__no_args_for_entity_true.graphql.snap new file mode 100644 index 0000000000..1b3641207b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__no_args_for_entity_true.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/no_args_for_entity_true.graphql +--- +[ + Message { + code: EntityResolverArgumentMismatch, + message: "`Query.product` with `@connect(entity: true)` must have arguments. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 10:7..10:19, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__unrelated_keys.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__unrelated_keys.graphql.snap new file mode 100644 index 0000000000..9c61c48ef3 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__invalid__unrelated_keys.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/unrelated_keys.graphql +--- +[ + Message { + code: MissingEntityConnector, + message: "Entity resolution for `@key(fields: \"id\")` on `Store` is not implemented by a connector. 
See https://go.apollo.dev/connectors/directives/#rules-for-entity-true", + locations: [ + 21:12..21:30, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__basic_implicit_key.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__basic_implicit_key.graphql.snap new file mode 100644 index 0000000000..b173e82556 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__basic_implicit_key.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/basic_implicit_key.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_non_resolvable_key.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_non_resolvable_key.graphql.snap new file mode 100644 index 0000000000..81f22c2b28 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_non_resolvable_key.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_non_resolvable_key.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_one_of_multiple_keys.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_one_of_multiple_keys.graphql.snap new file mode 100644 index 0000000000..20be5d11a9 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_connector_matches_one_of_multiple_keys.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_one_of_multiple_keys.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_field_counts_as_key_resolver.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_field_counts_as_key_resolver.graphql.snap new file mode 100644 index 0000000000..c6cda766c4 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__entity_field_counts_as_key_resolver.graphql.snap @@ -0,0 +1,12 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_field_counts_as_key_resolver.graphql +--- +[ + Message { + code: GraphQLError, + message: "Variables used in connector (`$this`) for `Product` cannot be used to create a valid `@key` directive.", + locations: [], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__mix_explicit_and_implicit.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__mix_explicit_and_implicit.graphql.snap new file mode 100644 index 
0000000000..a454ba231e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__mix_explicit_and_implicit.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/mix_explicit_and_implicit.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__multiple_entity_connectors_for_multiple_keys.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__multiple_entity_connectors_for_multiple_keys.graphql.snap new file mode 100644 index 0000000000..00b0d11a1c --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@keys_and_entities__valid__multiple_entity_connectors_for_multiple_keys.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/multiple_entity_connectors_for_multiple_keys.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_mutation_field.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_mutation_field.graphql.snap new file mode 100644 index 0000000000..87bb6114f5 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_mutation_field.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_mutation_field.graphql 
+--- +[ + Message { + code: MutationFieldMissingConnect, + message: "The field `Mutation.setMessage` has no `@connect` directive.", + locations: [ + 9:3..9:38, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_query_field.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_query_field.graphql.snap new file mode 100644 index 0000000000..b2ab1318b8 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_connect_on_query_field.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_query_field.graphql +--- +[ + Message { + code: QueryFieldMissingConnect, + message: "The field `Query.resources` has no `@connect` directive.", + locations: [ + 9:5..9:26, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_http_method_on_connect.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_http_method_on_connect.graphql.snap new file mode 100644 index 0000000000..5d5666d63d --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_http_method_on_connect.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/missing_http_method_on_connect.graphql +--- +[ + Message { + code: MissingHttpMethod, + message: "`@connect(http:)` on `Query.resources` must specify an HTTP method.", + locations: [ + 9:54..9:56, + ], + }, +] diff --git 
a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_source_import.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_source_import.graphql.snap new file mode 100644 index 0000000000..f175ee6424 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@missing_source_import.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/missing_source_import.graphql +--- +[ + Message { + code: NoSourcesDefined, + message: "`@connect(source: \"v2\")` on `Query.resources` specifies a source, but none are defined. Try adding `@connect__source(name: \"v2\")` to the schema.", + locations: [ + 7:14..7:26, + ], + }, + Message { + code: NoSourceImport, + message: "The `@source` directive is not imported. Try adding `@source` to `import` for `@link(url: \"https://specs.apollo.dev/connect\")`", + locations: [ + 2:3..2:76, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_errors.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_errors.graphql.snap new file mode 100644 index 0000000000..9525cee5a6 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_errors.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/multiple_errors.graphql +--- +[ + Message { + code: InvalidUrlScheme, + message: "The value \"ftp://127.0.0.1\" for `@source(baseURL:)` must be http or https, got ftp.", + locations: [ + 6:44..6:47, + ], + }, + Message { + code: InvalidSourceName, + message: "`@source(name: \"u$ers\")` is 
invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{0,63}$", + locations: [ + 6:17..6:24, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_http_methods_on_connect.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_http_methods_on_connect.graphql.snap new file mode 100644 index 0000000000..dfe9b0019f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@multiple_http_methods_on_connect.graphql.snap @@ -0,0 +1,15 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/multiple_http_methods_on_connect.graphql +--- +[ + Message { + code: MultipleHttpMethods, + message: "`@connect(http:)` on `Query.resources` cannot specify more than one HTTP method.", + locations: [ + 12:20..12:32, + 12:42..12:53, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@non_root_circular_reference.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@non_root_circular_reference.graphql.snap new file mode 100644 index 0000000000..6589606385 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@non_root_circular_reference.graphql.snap @@ -0,0 +1,16 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/non_root_circular_reference.graphql +--- +[ + Message { + code: CircularReference, + message: "Circular reference detected in `@connect(selection:)` on `Query.user`: type `Book` appears more than once in `Query.user.favoriteBooks.author.books`. 
For more information, see https://go.apollo.dev/connectors/limitations#circular-references", + locations: [ + 19:15..21:16, + 31:3..31:24, + 41:3..41:16, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@renamed_connect_directive.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@renamed_connect_directive.graphql.snap new file mode 100644 index 0000000000..edc8c3b831 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@renamed_connect_directive.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/renamed_connect_directive.graphql +--- +[ + Message { + code: QueryFieldMissingConnect, + message: "The field `Query.resources` has no `@data` directive.", + locations: [ + 9:5..9:26, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@select_nonexistant_group.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@select_nonexistant_group.graphql.snap new file mode 100644 index 0000000000..b2ef98cb34 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@select_nonexistant_group.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/select_nonexistant_group.graphql +--- +[ + Message { + code: SelectedFieldNotFound, + message: "`@connect(selection:)` on `Query.me` contains field `group`, which does not exist on `User`.", + locations: [ + 8:81..8:93, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@source_directive_rename.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@source_directive_rename.graphql.snap new file mode 100644 index 0000000000..03907d75e7 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@source_directive_rename.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/source_directive_rename.graphql +--- +[ + Message { + code: InvalidUrl, + message: "The value \"blahblahblah\" for `@api(baseURL:)` is not a valid URL: relative URL without a base.", + locations: [ + 6:40..6:54, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@subscriptions_with_connectors.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@subscriptions_with_connectors.graphql.snap new file mode 100644 index 0000000000..1c7b95fec6 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@subscriptions_with_connectors.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/subscriptions_with_connectors.graphql +--- +[ + Message { + code: SubscriptionInConnectors, + message: "A subscription root type is not supported when using `@connect`.", + locations: [ + 13:1..15:2, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__absolute_connect_url_with_source.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__absolute_connect_url_with_source.graphql.snap new file mode 100644 index 0000000000..bb6c499a4b --- /dev/null +++ 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__absolute_connect_url_with_source.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/absolute_connect_url_with_source.graphql +--- +[ + Message { + code: AbsoluteConnectUrlWithSource, + message: "`GET` in `@connect(http:)` on `Query.resources` contains the absolute URL \"http://127.0.0.1/resources\" while also specifying a `source`. Either remove the `source` argument or change the URL to a path.", + locations: [ + 12:20..12:48, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-jsonselection-in-expression.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-jsonselection-in-expression.graphql.snap new file mode 100644 index 0000000000..f0b73656a2 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-jsonselection-in-expression.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", result.errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-jsonselection-in-expression.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.resources`: nom::error::ErrorKind::Eof", + locations: [ + 12:27..12:28, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-path-parameter.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-path-parameter.graphql.snap new file mode 100644 index 0000000000..d773761cac --- /dev/null +++ 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid-path-parameter.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-path-parameter.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.resources`: Unknown variable", + locations: [ + 12:23..12:28, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url.graphql.snap new file mode 100644 index 0000000000..34fde9a757 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url.graphql +--- +[ + Message { + code: RelativeConnectUrlWithoutSource, + message: "`GET` in `@connect(http:)` on `Query.resources` specifies the relative URL \"127.0.0.1\", but no `source` is defined. Either use an absolute URL including scheme (e.g. 
https://), or add a `@connect__source`.", + locations: [ + 5:47..5:58, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url_scheme.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url_scheme.graphql.snap new file mode 100644 index 0000000000..2dccd7c006 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_connect_url_scheme.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url_scheme.graphql +--- +[ + Message { + code: InvalidUrlScheme, + message: "The value \"file://data.json\" for `GET` in `@connect(http:)` on `Query.resources` must be http or https, got file.", + locations: [ + 6:28..6:32, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_namespace_in_url_template_variables.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_namespace_in_url_template_variables.graphql.snap new file mode 100644 index 0000000000..aecca0fd8a --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_namespace_in_url_template_variables.graphql.snap @@ -0,0 +1,28 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_namespace_in_url_template_variables.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.unknown`: Unknown variable", + locations: [ + 11:31..11:39, + ], + }, + Message { 
+ code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.invalid`: $status is not valid here, must be one of $args, $config, $context", + locations: [ + 18:31..18:42, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.nodollar`: `config.bar` must start with an argument name, like `$this` or `$args`", + locations: [ + 25:31..25:41, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_nested_paths_in_url_template_variables.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_nested_paths_in_url_template_variables.graphql.snap new file mode 100644 index 0000000000..a4ffaf602b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_nested_paths_in_url_template_variables.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_nested_paths_in_url_template_variables.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.scalar`: `$args.scalar` doesn't have a field named `blah`", + locations: [ + 10:49..10:66, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.object`: `InputObject` doesn't have a field named `fieldThatDoesntExist`", + locations: [ + 15:49..15:81, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.enum`: `Enum` doesn't have a field named `cantHaveFields`", + locations: [ + 20:47..20:72, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Object.newField`: `$this` doesn't have a field named `fieldThatDoesntExist`", + locations: [ + 29:49..29:75, + ], + }, +] diff 
--git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url.graphql.snap new file mode 100644 index 0000000000..5bac1f0097 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url.graphql +--- +[ + Message { + code: InvalidUrl, + message: "The value \"127.0.0.1\" for `@source(baseURL:)` is not a valid URL: relative URL without a base.", + locations: [ + 6:40..6:51, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url_scheme.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url_scheme.graphql.snap new file mode 100644 index 0000000000..0bf6b0d5dc --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_source_url_scheme.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url_scheme.graphql +--- +[ + Message { + code: InvalidUrlScheme, + message: "The value \"file://data.json\" for `@source(baseURL:)` must be http or https, got file.", + locations: [ + 6:41..6:45, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_types.graphql.snap 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_types.graphql.snap new file mode 100644 index 0000000000..1d85a2579f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__invalid_types.graphql.snap @@ -0,0 +1,35 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_types.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.argIsArray`: array values aren't valid here", + locations: [ + 10:41..10:50, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.argIsObject`: object values aren't valid here", + locations: [ + 16:41..16:50, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `This.thisIsArray`: array values aren't valid here", + locations: [ + 25:41..25:54, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `This.requiresAnObject`: object values aren't valid here", + locations: [ + 31:45..31:59, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__relative_connect_url_without_source.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__relative_connect_url_without_source.graphql.snap new file mode 100644 index 0000000000..5bc57a1632 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__relative_connect_url_without_source.graphql.snap @@ -0,0 +1,14 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: 
apollo-federation/src/sources/connect/validation/test_data/uri_templates/relative_connect_url_without_source.graphql +--- +[ + Message { + code: RelativeConnectUrlWithoutSource, + message: "`GET` in `@connect(http:)` on `Query.resources` specifies the relative URL \"/resources\", but no `source` is defined. Either use an absolute URL including scheme (e.g. https://), or add a `@connect__source`.", + locations: [ + 5:47..5:59, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__this_on_root_types.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__this_on_root_types.graphql.snap new file mode 100644 index 0000000000..27bfc422dd --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__this_on_root_types.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/this_on_root_types.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.requiresThis`: $this is not valid here, must be one of $args, $config, $context", + locations: [ + 11:39..11:51, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Mutation.requiresThis`: $this is not valid here, must be one of $args, $config, $context", + locations: [ + 20:37..20:49, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_arg_in_url_template.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_arg_in_url_template.graphql.snap new file mode 100644 index 0000000000..5fc378f838 --- /dev/null +++ 
b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_arg_in_url_template.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_arg_in_url_template.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.resources`: `$args` doesn't have a field named `blah`", + locations: [ + 10:39..10:49, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Query.resources`: `$args` doesn't have a field named `something`", + locations: [ + 10:62..10:77, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_this_in_url_template.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_this_in_url_template.graphql.snap new file mode 100644 index 0000000000..e99cae2ab5 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__undefined_this_in_url_template.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_this_in_url_template.graphql +--- +[ + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Something.resources`: `$this` doesn't have a field named `blah`", + locations: [ + 19:39..19:49, + ], + }, + Message { + code: InvalidUrl, + message: "In `GET` in `@connect(http:)` on `Something.resources`: `$this` doesn't have a field named `something`", + locations: [ + 19:62..19:77, + ], + }, +] diff --git 
a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__valid_connect_absolute_url.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__valid_connect_absolute_url.graphql.snap new file mode 100644 index 0000000000..800774ec10 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@uri_templates__valid_connect_absolute_url.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/uri_templates/valid_connect_absolute_url.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_no_connect_on_resolvable_key_field.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_no_connect_on_resolvable_key_field.graphql.snap new file mode 100644 index 0000000000..3818735b23 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_no_connect_on_resolvable_key_field.graphql.snap @@ -0,0 +1,6 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: "format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/valid_no_connect_on_resolvable_key_field.graphql +--- +[] diff --git a/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_selection_with_escapes.graphql.snap b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_selection_with_escapes.graphql.snap new file mode 100644 index 0000000000..f303f11f84 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/snapshots/validation_tests@valid_selection_with_escapes.graphql.snap @@ -0,0 +1,21 @@ +--- +source: apollo-federation/src/sources/connect/validation/mod.rs +expression: 
"format!(\"{:#?}\", errors)" +input_file: apollo-federation/src/sources/connect/validation/test_data/valid_selection_with_escapes.graphql +--- +[ + Message { + code: InvalidJsonSelection, + message: "`@connect(selection:)` on `Query.block` is not a valid JSONSelection: Path selection . must be followed by key (identifier or quoted string literal): .", + locations: [ + 16:30..16:31, + ], + }, + Message { + code: InvalidJsonSelection, + message: "`@connect(selection:)` on `Query.standard` is not a valid JSONSelection: Path selection . must be followed by key (identifier or quoted string literal): .", + locations: [ + 22:95..22:96, + ], + }, +] diff --git a/apollo-federation/src/sources/connect/validation/source_name.rs b/apollo-federation/src/sources/connect/validation/source_name.rs new file mode 100644 index 0000000000..863c3955b0 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/source_name.rs @@ -0,0 +1,199 @@ +use std::fmt::Display; + +use apollo_compiler::ast::Argument; +use apollo_compiler::ast::Value; +use apollo_compiler::parser::SourceMap; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::Directive; +use apollo_compiler::Name; +use apollo_compiler::Node; + +use super::coordinates::connect_directive_name_coordinate; +use super::coordinates::source_name_argument_coordinate; +use super::coordinates::source_name_value_coordinate; +use super::Code; +use super::DirectiveName; +use super::Message; +use crate::sources::connect::spec::schema::SOURCE_NAME_ARGUMENT_NAME; +use crate::sources::connect::validation::graphql::SchemaInfo; + +// Adding a module to allow changing clippy lints for the regex +#[allow(clippy::expect_used)] +mod patterns { + use once_cell::sync::Lazy; + use regex::Regex; + + /// This is the same regular expression used for subgraph names + pub(super) static SOURCE_NAME_REGEX: Lazy = Lazy::new(|| { + Regex::new(r"^[a-zA-Z][a-zA-Z0-9_-]{0,63}$") + .expect("this regex to check source names is valid") + }); +} 
+ +pub(super) fn validate_source_name_arg( + field_name: &Name, + object_name: &Name, + source_name: &Node<Argument>, + source_names: &[SourceName], + schema: &SchemaInfo, +) -> Vec<Message> { + let mut messages = vec![]; + + if source_names.iter().all(|name| name != &source_name.value) { + // TODO: Pick a suggestion that's not just the first defined source + let qualified_directive = connect_directive_name_coordinate( + schema.connect_directive_name, + &source_name.value, + object_name, + field_name, + ); + if let Some(first_source_name) = source_names.first() { + messages.push(Message { + code: Code::SourceNameMismatch, + message: format!( + "{qualified_directive} does not match any defined sources. Did you mean {first_source_name}?", + ), + locations: source_name.line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } else { + messages.push(Message { + code: Code::NoSourcesDefined, + message: format!( + "{qualified_directive} specifies a source, but none are defined. Try adding {coordinate} to the schema.", + coordinate = source_name_value_coordinate(schema.source_directive_name, &source_name.value), + ), + locations: source_name.line_column_range(&schema.sources) + .into_iter() + .collect(), + }); + } + } + + messages +} + +/// The `name` argument of a `@source` directive. +#[derive(Clone, Debug)] +pub(super) enum SourceName { + /// A perfectly reasonable source name. + Valid { + value: Node<Value>, + directive_name: DirectiveName, + }, + /// Contains invalid characters, so it will have to be renamed. This means certain checks + /// (like uniqueness) should be skipped. However, we have _a_ name, so _other_ checks on the + /// `@source` directive can continue. 
+ Invalid { + value: Node<Value>, + directive_name: DirectiveName, + }, + /// The name was an empty string + Empty { + directive_name: DirectiveName, + value: Node<Value>, + }, + /// No `name` argument was defined + Missing { + directive_name: DirectiveName, + ast_node: Node<Directive>, + }, +} + +impl SourceName { + pub(crate) fn from_directive(directive: &Component<Directive>) -> Self { + let directive_name = directive.name.clone(); + let Some(arg) = directive + .arguments + .iter() + .find(|arg| arg.name == SOURCE_NAME_ARGUMENT_NAME) + else { + return Self::Missing { + directive_name, + ast_node: directive.node.clone(), + }; + }; + let Some(str_value) = arg.value.as_str() else { + return Self::Invalid { + value: arg.value.clone(), + directive_name, + }; + }; + if str_value.is_empty() { + Self::Empty { + directive_name, + value: arg.value.clone(), + } + } else if patterns::SOURCE_NAME_REGEX.is_match(str_value) { + Self::Valid { + value: arg.value.clone(), + directive_name, + } + } else { + Self::Invalid { + value: arg.value.clone(), + directive_name, + } + } + } + + pub(crate) fn into_value_or_error(self, sources: &SourceMap) -> Result<Node<Value>, Message> { + match self { + Self::Valid { value, ..} => Ok(value), + Self::Invalid { + value, + directive_name, + } => Err(Message { + // This message is the same as Studio when trying to publish a subgraph with an invalid name + message: format!("{coordinate} is invalid; all source names must follow pattern '^[a-zA-Z][a-zA-Z0-9_-]{{0,63}}$'", coordinate = source_name_value_coordinate(&directive_name, &value)), + code: Code::InvalidSourceName, + locations: value.line_column_range(sources).into_iter().collect(), + }), + Self::Empty { directive_name, value } => { + Err(Message { + code: Code::EmptySourceName, + message: format!("The value for {coordinate} can't be empty.", coordinate = source_name_argument_coordinate(&directive_name)) , + locations: value.line_column_range(sources).into_iter().collect(), + }) + } + Self::Missing { directive_name, ast_node } => 
Err(Message { + code: Code::GraphQLError, + message: format!("The {coordinate} argument is required.", coordinate = source_name_argument_coordinate(&directive_name)), + locations: ast_node.line_column_range(sources).into_iter().collect() + }), + } + } +} + +impl Display for SourceName { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::Valid { + value, + directive_name, + } + | Self::Invalid { + value, + directive_name, + } => write!( + f, + "`@{directive_name}({SOURCE_NAME_ARGUMENT_NAME}: {value})`" + ), + Self::Empty { directive_name, .. } | Self::Missing { directive_name, .. } => { + write!(f, "unnamed `@{directive_name}`") + } + } + } +} + +impl PartialEq> for SourceName { + fn eq(&self, other: &Node) -> bool { + match self { + Self::Valid { value, .. } | Self::Invalid { value, .. } => value == other, + Self::Empty { .. } | Self::Missing { .. } => { + other.as_str().unwrap_or_default().is_empty() + } + } + } +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/all_fields_selected.graphql b/apollo-federation/src/sources/connect/validation/test_data/all_fields_selected.graphql new file mode 100644 index 0000000000..7cb8035eb4 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/all_fields_selected.graphql @@ -0,0 +1,73 @@ +extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.10" + import: ["@key", "@external", "@requires"] + ) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + ts: [T] + @connect( + http: { GET: "http://test/ts" } + selection: """ + id + child { + id + } + wrapped: { + id + } + unwrapped: foo.bar + """ + ) + @connect( + http: { GET: "http://test/v2/ts" } + selection: """ + id + secondUsed + """ + ) +} + +type T @key(fields: "id") { + id: ID! + unselected: String! + child: C + wrapped: D + unwrapped: String! + external: External @external + external2: External2 @external + computed: String! 
+ @requires(fields: "external") + @connect( + http: { + GET: "http://test/computed?id={$this.id}&external={$this.external.id}&external2={$this.external2.id}" + } + selection: "$" + ) + + secondUnused: String + secondUsed: String +} + +type C { + id: ID! + unselected: String +} + +type D { + id: ID! + unselected: String +} + +type Unused { + unselected: ID! +} + +type External { + id: ID! @external +} + +type External2 @external { + id: ID! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/circular_reference.graphql b/apollo-federation/src/sources/connect/validation/test_data/circular_reference.graphql new file mode 100644 index 0000000000..fb1338f4e7 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/circular_reference.graphql @@ -0,0 +1,15 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] +) + +type Query { + me: User + @connect(http: {GET: "http://127.0.0.1/me"}, selection: "id friends {id}") +} + +type User { + id: ID! + friends: [User!]! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/circular_reference_2.graphql b/apollo-federation/src/sources/connect/validation/test_data/circular_reference_2.graphql new file mode 100644 index 0000000000..7abee48bac --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/circular_reference_2.graphql @@ -0,0 +1,26 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + track(id: ID!): Track + @connect( + http: { GET: "http://track/{$args.id}" } + selection: "id" + entity: true + ) +} + +type Track { + id: ID! + modules: [Module] + @connect( + http: { GET: "http://track/{$this.id}/modules" } + selection: "id title track: { id: trackId }" + ) +} + +type Module { + id: ID! 
+ title: String + track: Track +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/circular_reference_3.graphql b/apollo-federation/src/sources/connect/validation/test_data/circular_reference_3.graphql new file mode 100644 index 0000000000..47fe256290 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/circular_reference_3.graphql @@ -0,0 +1,16 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + me: User @connect(http: { GET: "http://127.0.0.1/me" }, selection: "id name") +} + +type User { + id: ID! + name: String + friends: [User!]! # this can't ever work because `{ me { friends { friends { ... } } } }` will always fail + @connect( + http: { GET: "http://127.0.0.1/users/{$this.id}/friends" } + selection: "id name" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/connect_source_name_mismatch.graphql b/apollo-federation/src/sources/connect/validation/test_data/connect_source_name_mismatch.graphql new file mode 100644 index 0000000000..797aa73101 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/connect_source_name_mismatch.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! 
+ @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/connect_source_undefined.graphql b/apollo-federation/src/sources/connect/validation/test_data/connect_source_undefined.graphql new file mode 100644 index 0000000000..1957c26bb4 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/connect_source_undefined.graphql @@ -0,0 +1,10 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + resources: [String!]! + @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/denest_scalars.graphql b/apollo-federation/src/sources/connect/validation/test_data/denest_scalars.graphql new file mode 100644 index 0000000000..0a734eb285 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/denest_scalars.graphql @@ -0,0 +1,31 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + me: User + @connect( + http: { GET: "http://127.0.0.1/something" } + selection: """ + id + $.name { + firstName: first + lastName: last + } + $.address { + street + } + """ + ) +} + +type User { + id: ID! 
+ firstName: String + lastName: String + street: Street +} + +type Street { + number: Int + name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/denest_scalars2.graphql b/apollo-federation/src/sources/connect/validation/test_data/denest_scalars2.graphql new file mode 100644 index 0000000000..b6df406747 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/denest_scalars2.graphql @@ -0,0 +1,26 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + me: User + @connect( + http: { GET: "http://127.0.0.1/something" } + selection: """ + id + $.name { + firstName: first + lastName: last + } + $.address { + street { number name } + } + """ + ) +} + +type User { + id: ID! + firstName: String + lastName: String + street: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/disallowed_abstract_types.graphql b/apollo-federation/src/sources/connect/validation/test_data/disallowed_abstract_types.graphql new file mode 100644 index 0000000000..03203d888d --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/disallowed_abstract_types.graphql @@ -0,0 +1,48 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + products: [Product] + @connect( + http: { GET: "http://127.0.0.1:8000/products" } + selection: """ + $.results { + id + title + author { name } + director { name } + } + """ + ) + search(title: String): [Media] + @connect(http: { GET: "http://127.0.0.1:8000/media" }, selection: "$") +} + +interface Product { + id: ID! 
+} + +union Media = Book | Film | Music + +type Book implements Product { + id: ID + title: String + author: Person +} + +type Film implements Product { + id: ID + title: String + director: Person +} + +type Music { + id: ID + title: String + singer: Person +} + +type Person { + id: ID + name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/disallowed_federation_imports.graphql b/apollo-federation/src/sources/connect/validation/test_data/disallowed_federation_imports.graphql new file mode 100644 index 0000000000..a42bdb2b55 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/disallowed_federation_imports.graphql @@ -0,0 +1,22 @@ +extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.8" + import: [ + "@key" + "@context" + "@fromContext" + "@interfaceObject" + "@external" + "@requires" + "@provides" + ] + ) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + resources: [String!]! + @connect(http: { GET: "http://127.0.0.1" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/duplicate_source_name.graphql b/apollo-federation/src/sources/connect/validation/test_data/duplicate_source_name.graphql new file mode 100644 index 0000000000..4d5a99f08e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/duplicate_source_name.graphql @@ -0,0 +1,12 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v1", http: { baseURL: "http://127.0.0.1" }) + @source(name: "v1", http: { baseURL: "http://localhost" }) + +type Query { + resources: [String!]! 
+ @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/empty_selection.graphql b/apollo-federation/src/sources/connect/validation/test_data/empty_selection.graphql new file mode 100644 index 0000000000..5d35acabcb --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/empty_selection.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! + @connect( + source: "v2" + http: { POST: "/resources", body: " " } + selection: "# comment" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/empty_source_name.graphql b/apollo-federation/src/sources/connect/validation/test_data/empty_source_name.graphql new file mode 100644 index 0000000000..2c6f9cb4e4 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/empty_source_name.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! 
+ @connect(source: "", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/fields_with_arguments.graphql b/apollo-federation/src/sources/connect/validation/test_data/fields_with_arguments.graphql new file mode 100644 index 0000000000..5fc799baa6 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/fields_with_arguments.graphql @@ -0,0 +1,21 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + ts(first: Int): [T] + @connect( + http: { GET: "http://test/ts?first={$args.first}" } + selection: """ + id + field + """ + ) +} + +type T @key(fields: "id") { + id: ID! + field(foo: String): String + other(bar: String): String + @connect(http: { GET: "http://test/other?bar={$args.bar}" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/group_selection_on_scalar.graphql b/apollo-federation/src/sources/connect/validation/test_data/group_selection_on_scalar.graphql new file mode 100644 index 0000000000..6d644cdff2 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/group_selection_on_scalar.graphql @@ -0,0 +1,13 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] +) + +type Query { + me: User @connect(http: {GET: "http://127.0.0.1/something"}, selection: "id {blah}") +} + +type User { + id: ID! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/headers/expressions_that_evaluate_to_invalid_types.graphql b/apollo-federation/src/sources/connect/validation/test_data/headers/expressions_that_evaluate_to_invalid_types.graphql new file mode 100644 index 0000000000..7308c8cce7 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/headers/expressions_that_evaluate_to_invalid_types.graphql @@ -0,0 +1,24 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] +) +@source( + name: "v1" + http: { + baseURL: "http://127.0.0.1" + headers: [ + { name: "an-array", value: "{$->echo([])}" }, + { name: "an-object", value: "{$({})}"}, + { name: "maybe-object", value: "$config->match([1, 1], [2, {}])" }, + ] + } +) + +type Query { + blah(anArray: [String]): String @connect( + source: "v1", + http: {GET: "/blah", headers: {name: "an-array", value: "{$args.anArray}"}}, + selection: "$" + ) +} \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_connect_http_headers.graphql b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_connect_http_headers.graphql new file mode 100644 index 0000000000..d01ec27812 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_connect_http_headers.graphql @@ -0,0 +1,31 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + resources: [String!]! 
+ @connect( + http: { + GET: "http://127.0.0.1:8000/resources" + headers: [ + { name: "valid-with-value", value: "text/html" } + { name: "valid-with-from", from: "valid-with-from" } + { name: "no-from-or-value" } + { from: "x-missing-name" } + { name: "from-and-value", from: "from-and-value", value: "text/html" } + { name: "x-name-collision", value: "text/html" } + { name: "X-NAME-COLLISION", from: "x-name-collision" } + { name: "", value: "invalid.header.name" } + { name: "x-invalid-from", from: "" } + { + name: "x-invalid-value" + value: " Value with 😊 emoji and newline \n " + } + { name: "Content-Length", value: "Is a reserved header" } + { name: "Content-Type", from: "Cant-Be-Dynamic" } + { name: "accept", from: "Is a reserved header" } + { name: "accept", value: "application/json; version=v4.0" } # ok + ] + } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_namespace_in_header_variables.graphql b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_namespace_in_header_variables.graphql new file mode 100644 index 0000000000..00c68d3361 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_namespace_in_header_variables.graphql @@ -0,0 +1,32 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "v1" + http: { + baseURL: "https://127.0.0.1" + headers: [ + { name: "x-unknown-namespace", value: "{$foo.bar}" } + { name: "x-invalid-location-for-namespace", value: "{$this.bar}" } + { name: "x-namespace-missing-dollar", value: "{config.bar}" } + ] + } + ) + +type Query { + scalar(bar: String): String + @connect( + http: { + GET: "http://127.0.0.1" + headers: [ + { name: "x-unknown-namespace", value: "{$foo.bar}"} + { name: "x-invalid-location-for-namespace", value: "{$status}" } + { name: "x-no-this-on-root", value: "{$this}" } + { name: "x-namespace-missing-dollar", 
value: "{config.bar}" } + ] + } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_nested_paths_in_header_variables.graphql b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_nested_paths_in_header_variables.graphql new file mode 100644 index 0000000000..36b56d38f8 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_nested_paths_in_header_variables.graphql @@ -0,0 +1,60 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + scalar(scalar: String): String + @connect( + http: { + GET: "http://127.0.0.1" + headers: [ + { name: "x-custom-header", value: "{$args.scalar.blah}"} + ] + } + selection: "$" + ) + object(input: InputObject): Object + @connect( + http: { + GET: "http://127.0.0.1" + headers: [ + { name: "x-custom-header", value: "{$args.input.fieldThatDoesntExist}"} + ] + } + selection: "id" + ) + enum(enum: Enum): Enum + @connect( + http: { + GET: "http://127.0.0.1" + headers: [ + { name: "x-custom-header", value: "{$args.enum.cantHaveFields}"} + ] + } + selection: "$" + ) +} + +type Object { + id: ID! + newField: String + @connect( + http: { + GET: "http://127.0.0.1" + headers: [ + { name: "x-custom-header", value: "{$this.fieldThatDoesntExist}"} + ] + } + selection: "$" + ) +} + +input InputObject { + id: ID! 
+} + +enum Enum { + VALUE +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_source_http_headers.graphql b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_source_http_headers.graphql new file mode 100644 index 0000000000..223dff2c7c --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/headers/invalid_source_http_headers.graphql @@ -0,0 +1,33 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "v1" + http: { + baseURL: "http://127.0.0.1" + headers: [ + { name: "valid-with-value", value: "text/html" } + { name: "valid-with-from", from: "valid-with-from" } + { name: "no-from-or-value" } + { from: "x-missing-name" } + { name: "from-and-value", from: "from-and-value", value: "text/html" } + { name: "x-name-collision", value: "text/html" } + { name: "X-NAME-COLLISION", from: "x-name-collision" } + { name: "", value: "invalid.header.name" } + { name: "x-invalid-from", from: "" } + { + name: "x-invalid-value" + value: " Value with 😊 emoji and newline \n " + } + { name: "Content-Length", value: "Is a reserved header" } + { name: "Content-Type", from: "Cant-Be-Dynamic" } + ] + } + ) + +type Query { + resources: [String!]! 
+ @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/invalid_chars_in_source_name.graphql b/apollo-federation/src/sources/connect/validation/test_data/invalid_chars_in_source_name.graphql new file mode 100644 index 0000000000..44ac8b5714 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/invalid_chars_in_source_name.graphql @@ -0,0 +1,27 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "u$ers", http: { baseURL: "http://127.0.0.1" }) + @source(name: "1", http: { baseURL: "http://127.0.0.1" }) + @source(name: "no.dots", http: { baseURL: "http://127.0.0.1" }) + @source( + name: "areallylongnamethatisoversixtythreecharacterstakesalongwhiletotypebutthisshoulddoit" + http: { baseURL: "http://127.0.0.1" } + ) + +type Query { + resources1: [String!]! + @connect(source: "u$ers", http: { GET: "/resources" }, selection: "$") + resources2: [String!]! + @connect(source: "1", http: { GET: "/resources" }, selection: "$") + resources3: [String!]! + @connect(source: "no.dots", http: { GET: "/resources" }, selection: "$") + resources4: [String!]! 
+ @connect( + source: "areallylongnamethatisoversixtythreecharacterstakesalongwhiletotypebutthisshoulddoit" + http: { GET: "/resources" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/invalid_namespace_in_body_selection.graphql b/apollo-federation/src/sources/connect/validation/test_data/invalid_namespace_in_body_selection.graphql new file mode 100644 index 0000000000..0d3ddb2437 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/invalid_namespace_in_body_selection.graphql @@ -0,0 +1,25 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] + ) + +type Mutation { + createUser(id: ID!): String + @connect( + http: { + POST: "http://127.0.0.1/users/{$args.id}" + body: """ + id: $args.id + foo: $config.foo + bar: { + bar: $config.bar + baz: { + baz: $config.baz + } + } + status: $status + """ + } + selection: "$status" + ) diff --git a/apollo-federation/src/sources/connect/validation/test_data/invalid_nested_paths_in_json_selection.graphql b/apollo-federation/src/sources/connect/validation/test_data/invalid_nested_paths_in_json_selection.graphql new file mode 100644 index 0000000000..666f00de7a --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/invalid_nested_paths_in_json_selection.graphql @@ -0,0 +1,52 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + scalar(scalar: String): String + @connect( + http: { + POST: "http://127.0.0.1" + body: "foo: $args.scalar.blah" + } + selection: "$" + ) + object(input: InputObject): Object + @connect( + http: { + POST: "http://127.0.0.1" + body: "foo: $args.input.fieldThatDoesntExist" + } + selection: "id" + ) + enum(enum: Enum): Enum + @connect( + http: { + POST: "http://127.0.0.1" + body: "foo: $args.enum.cantHaveFields" + } + selection: "$" + ) +} + +type Object { + id: ID! 
+ newField: String + @connect( + http: { + POST: "http://127.0.0.1" + body: "foo: $this.fieldThatDoesntExist" + } + selection: "$" + ) +} + +input InputObject { + id: ID! +} + +enum Enum { + VALUE +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/invalid_selection_syntax.graphql b/apollo-federation/src/sources/connect/validation/test_data/invalid_selection_syntax.graphql new file mode 100644 index 0000000000..9ce31b3c16 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/invalid_selection_syntax.graphql @@ -0,0 +1,9 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] +) + +type Query { + something: String @connect(http: {GET: "http://127.0.0.1/something"}, selection: "&how") +} \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_is_object_but_field_is_not.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_is_object_but_field_is_not.graphql new file mode 100644 index 0000000000..0399ec5ef0 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_is_object_but_field_is_not.graphql @@ -0,0 +1,21 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: ProductInput!): Product + @connect( + http: { GET: "http://myapi/products/{$args.id.id}" } + selection: "id name" + entity: true + ) +} + +type Product { + id: ID! + name: String +} + +input ProductInput { + id: ID! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_type_doesnt_match_field_type.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_type_doesnt_match_field_type.graphql new file mode 100644 index 0000000000..d9029ff1ab --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/arg_type_doesnt_match_field_type.graphql @@ -0,0 +1,17 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: String!): Product + @connect( + http: { GET: "http://myapi/products/{$args.id}" } + selection: "id name" + entity: true + ) +} + +type Product { + id: ID! + name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/composite_key_doesnt_match.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/composite_key_doesnt_match.graphql new file mode 100644 index 0000000000..0f4e2f7cde --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/composite_key_doesnt_match.graphql @@ -0,0 +1,47 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + "The auto-key `store` field doesn't match the explicit composite key, so that key is unresolved" + product(id: ID!, store: StoreInput!): Product + @connect( + http: { + GET: "http://myapi/region/{$args.store.country.region}/country/{$args.store.country.id}/store/{$args.store.id}/products/{$args.id}" + } + selection: "id store { id country { id key_id region } } name" + entity: true + ) +} + +type Product @key(fields: "id store { id country { key_id region } }") { + id: ID! + store: Store! 
+ name: String +} + +type Store { + id: ID! + country: Country +} + +type Country { + id: ID! + key_id: ID! + region: Region +} + +input StoreInput { + id: ID! + country: CountryInput! +} + +enum Region { + AMERICAS + EUROPE +} + +input CountryInput { + id: ID! + region: Region! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch.graphql new file mode 100644 index 0000000000..951b28331b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch.graphql @@ -0,0 +1,17 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: ID!): Product + @connect( + http: { GET: "http://myapi/products/{$args.id}" } + selection: "not_named_id name" + entity: true + ) +} + +type Product { + not_named_id: ID! 
+ name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch_composite_key.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch_composite_key.graphql new file mode 100644 index 0000000000..507cd65180 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_arg_field_arg_name_mismatch_composite_key.graphql @@ -0,0 +1,44 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: ID!, store: StoreInput!): Product + @connect( + http: { + GET: "http://myapi/region/{$args.store.country.region}/country/{$args.store.country.id}/store/{$args.store.id}/products/{$args.id}" + } + selection: "id store { id country { not_named_id region } } name" + entity: true + ) +} + +type Product @key(fields: "id store { id country { not_named_id region } }") { + id: ID! + store: Store! + name: String +} + +type Store { + id: ID! + country: Country +} + +type Country { + not_named_id: ID! + region: Region +} + +input StoreInput { + id: ID! + country: CountryInput! +} + +enum Region { + AMERICAS + EUROPE +} +input CountryInput { + id: ID! + region: Region! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_list_type.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_list_type.graphql new file mode 100644 index 0000000000..5e19542792 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_list_type.graphql @@ -0,0 +1,16 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + users(id: ID!): [User] + @connect( + http: { GET: "http://127.0.0.1:8000/resources" } + entity: true + selection: "id name" + ) +} + +type User { + id: ID! + name: String! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_non_root_field.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_non_root_field.graphql new file mode 100644 index 0000000000..92b1fe2b11 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_on_non_root_field.graphql @@ -0,0 +1,22 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + user(id: ID!): User + @connect( + http: { GET: "http://127.0.0.1:8000/resources/{$args.id}" } + entity: true + selection: "id name" + ) +} + +type User { + id: ID! + name: String! 
+ favoriteColor: String + @connect( + http: { GET: "http://127.0.0.1:8000/resources" } + entity: true + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_non_null_type.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_non_null_type.graphql new file mode 100644 index 0000000000..e17256ab4c --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_non_null_type.graphql @@ -0,0 +1,16 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + user(id: ID!): User! + @connect( + http: { GET: "http://127.0.0.1:8000/resources/{$args.id}" } + entity: true + selection: "id name" + ) +} + +type User { + id: ID! + name: String! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_scalar.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_scalar.graphql new file mode 100644 index 0000000000..66ff40b7ff --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/entity_true_returning_scalar.graphql @@ -0,0 +1,11 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + name: String + @connect( + http: { GET: "http://127.0.0.1:8000/resources" } + entity: true + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/multiple_keys_not_all_resolved.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/multiple_keys_not_all_resolved.graphql new file mode 100644 index 0000000000..20725c014c --- /dev/null +++ 
b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/multiple_keys_not_all_resolved.graphql @@ -0,0 +1,50 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + "The auto-key here matches the first `@key`, but the second `@key` is still unresolved" + product(id: ID!, store: StoreInput!): Product + @connect( + http: { + GET: "http://myapi/region/{$args.store.country.region}/country/{$args.store.country.id}/store/{$args.store.id}/products/{$args.id}" + } + selection: "id store { id country { id key_id key_id2 region } } name" + entity: true + ) +} + +type Product + @key(fields: "id store { id country { id region } }") + @key(fields: "id store { id country { key_id region } }") { + id: ID! + store: Store! + name: String +} + +type Store { + id: ID! + country: Country +} + +type Country { + id: ID! + key_id: ID! + key_id2: ID! + region: Region +} + +input StoreInput { + id: ID! + country: CountryInput! +} + +enum Region { + AMERICAS + EUROPE +} + +input CountryInput { + id: ID! + region: Region! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/no_args_for_entity_true.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/no_args_for_entity_true.graphql new file mode 100644 index 0000000000..b0dc483bcc --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/no_args_for_entity_true.graphql @@ -0,0 +1,17 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product: Product + @connect( + http: { GET: "http://myapi/products" } + selection: "id name" + entity: true + ) +} + +type Product { + id: ID! 
+ name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/unrelated_keys.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/unrelated_keys.graphql new file mode 100644 index 0000000000..e4781dafa6 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/invalid/unrelated_keys.graphql @@ -0,0 +1,28 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + products: [Product] + @connect( + http: { GET: "http://myapi/products" } + selection: "id name store { id } seller { id }" + ) +} + +type Product { + id: ID! + name: String + store: Store + seller: Seller +} + +# error +type Store @key(fields: "id") { + id: ID! +} + +# no error +type Seller @key(fields: "id", resolvable: false) { + id: ID! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/basic_implicit_key.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/basic_implicit_key.graphql new file mode 100644 index 0000000000..e529d1cc20 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/basic_implicit_key.graphql @@ -0,0 +1,16 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + user(id: ID!): User + @connect( + http: { GET: "http://127.0.0.1:8000/resources/{$args.id}" } + entity: true + selection: "id name" + ) +} + +type User { + id: ID! + name: String! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_non_resolvable_key.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_non_resolvable_key.graphql new file mode 100644 index 0000000000..40d5f5b19f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_non_resolvable_key.graphql @@ -0,0 +1,17 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: ID!): Product + @connect( + http: { GET: "http://127.0.0.1:8000/v1/products/{$args.id}" } + entity: true + selection: "id name" + ) +} + +type Product @key(fields: "id", resolvable: false) { + id: ID! + name: String! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_one_of_multiple_keys.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_one_of_multiple_keys.graphql new file mode 100644 index 0000000000..a7f81db808 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_connector_matches_one_of_multiple_keys.graphql @@ -0,0 +1,47 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + product(id: ID!, store: StoreInput!): Product + @connect( + http: { + GET: "http://myapi/region/{$args.store.country.region}/country/{$args.store.country.id}/store/{$args.store.id}/products/{$args.id}" + } + selection: "id store { id country { id key_id region } } name" + entity: true + ) +} + +type Product + @key(fields: "id store { id country { key_id region } }", 
resolvable: false) + @key(fields: "id store { id country { id region } }") { + id: ID! + store: Store! + name: String +} + +type Store { + id: ID! + country: Country +} + +type Country { + id: ID! + key_id: ID! + region: Region +} + +input StoreInput { + id: ID! + country: CountryInput! +} + +enum Region { + AMERICAS + EUROPE +} +input CountryInput { + id: ID! + region: Region! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_field_counts_as_key_resolver.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_field_counts_as_key_resolver.graphql new file mode 100644 index 0000000000..d3d846ef37 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/entity_field_counts_as_key_resolver.graphql @@ -0,0 +1,37 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://localhost" }) + +type Query { + price(id: ID!): Price + @connect( + source: "v2" + http: { GET: "/price/{$args.id}" } + selection: "id" + entity: true + ) +} + +type Sku { + id: ID! +} + +type Product @key(fields: "sku { id }") { + sku: Sku! + price: Price + @connect( + source: "v2" + http: { GET: "/products/", body: "$this { sku { id } }" } + selection: """ + id: default_price + """ + ) +} + +type Price { + id: ID! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/mix_explicit_and_implicit.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/mix_explicit_and_implicit.graphql new file mode 100644 index 0000000000..960ecc55f2 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/mix_explicit_and_implicit.graphql @@ -0,0 +1,24 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + implicit(id: ID!): Product + @connect( + http: { GET: "http://myapi/products/{$args.id}" } + selection: "id key_id name" + entity: true + ) + explicit(key_id: ID!): Product + @connect( + http: { GET: "http://myapi/products/{$args.key_id}" } + selection: "id key_id name" + entity: true + ) +} + +type Product @key(fields: "key_id") { + id: ID! + key_id: ID! + name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/multiple_entity_connectors_for_multiple_keys.graphql b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/multiple_entity_connectors_for_multiple_keys.graphql new file mode 100644 index 0000000000..2945addbfc --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/keys_and_entities/valid/multiple_entity_connectors_for_multiple_keys.graphql @@ -0,0 +1,31 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8", import: ["@key"]) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v1", http: { baseURL: "http://localhost" }) + @source(name: "v2", http: { baseURL: "http://localhost" }) + +type Query { + productById(id: ID!): Product + @connect( + source: "v1" + http: { GET: "/products/{$args.id}" } + selection: "id sku name" + entity: true + ) + 
productBySku(sku: ID!): Product + @connect( + source: "v2" + http: { GET: "/products/{$args.sku}" } + selection: "id sku name" + entity: true + ) +} + +type Product @key(fields: "id") @key(fields: "sku") { + id: ID! + sku: ID! + name: String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_mutation_field.graphql b/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_mutation_field.graphql new file mode 100644 index 0000000000..6a0bd3e603 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_mutation_field.graphql @@ -0,0 +1,10 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Mutation { + setMessage(message: String): String +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_query_field.graphql b/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_query_field.graphql new file mode 100644 index 0000000000..edd21d5267 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/missing_connect_on_query_field.graphql @@ -0,0 +1,10 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] +) +@source(name: "v2", http: {baseURL: "http://127.0.0.1"}) + +type Query { + resources: [String!]! 
+} diff --git a/apollo-federation/src/sources/connect/validation/test_data/missing_http_method_on_connect.graphql b/apollo-federation/src/sources/connect/validation/test_data/missing_http_method_on_connect.graphql new file mode 100644 index 0000000000..c3900be6b6 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/missing_http_method_on_connect.graphql @@ -0,0 +1,10 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@source", "@connect"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! @connect(source: "v2", http: {}, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/missing_source_import.graphql b/apollo-federation/src/sources/connect/validation/test_data/missing_source_import.graphql new file mode 100644 index 0000000000..4895abc631 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/missing_source_import.graphql @@ -0,0 +1,8 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! + @connect(source: "v2", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/multiple_errors.graphql b/apollo-federation/src/sources/connect/validation/test_data/multiple_errors.graphql new file mode 100644 index 0000000000..57e50694f1 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/multiple_errors.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "u$ers", http: { baseURL: "ftp://127.0.0.1" }) + +type Query { + resources: [String!]! 
+ @connect(source: "u$ers", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/multiple_http_methods_on_connect.graphql b/apollo-federation/src/sources/connect/validation/test_data/multiple_http_methods_on_connect.graphql new file mode 100644 index 0000000000..4030fb4ee1 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/multiple_http_methods_on_connect.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@source", "@connect"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! + @connect( + source: "v2" + http: { GET: "/resources", DELETE: "/resource" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/non_root_circular_reference.graphql b/apollo-federation/src/sources/connect/validation/test_data/non_root_circular_reference.graphql new file mode 100644 index 0000000000..c9904b6fe3 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/non_root_circular_reference.graphql @@ -0,0 +1,42 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + user(id: ID!): User + @connect( + http: { GET: "https://api.example.com/users/{$args.id}" } + selection: """ + id + name + favoriteBooks { + id + author { + id + books { + id + } + } + } + """ + ) +} + +type User { + id: ID! + name: String + favoriteBooks: [Book] +} + +type Book { + id: ID! + author: Author +} + +type Author { + id: ID! 
+ books: [Book] +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/renamed_connect_directive.graphql b/apollo-federation/src/sources/connect/validation/test_data/renamed_connect_directive.graphql new file mode 100644 index 0000000000..da1a44065b --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/renamed_connect_directive.graphql @@ -0,0 +1,10 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: [{name: "@connect", as: "@data"}, "@source"] +) +@source(name: "v2", http: {baseURL: "http://127.0.0.1"}) + +type Query { + resources: [String!]! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/select_nonexistant_group.graphql b/apollo-federation/src/sources/connect/validation/test_data/select_nonexistant_group.graphql new file mode 100644 index 0000000000..092adae6ba --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/select_nonexistant_group.graphql @@ -0,0 +1,13 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] +) + +type Query { + me: User @connect(http: {GET: "http://127.0.0.1/something"}, selection: "id group { id }") +} + +type User { + id: ID! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/source_directive_rename.graphql b/apollo-federation/src/sources/connect/validation/test_data/source_directive_rename.graphql new file mode 100644 index 0000000000..b6d1261885 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/source_directive_rename.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", { name: "@source", as: "@api" }] + ) + @api(name: "users", http: { baseURL: "blahblahblah" }) + +type Query { + resources: [String!]! 
+ @connect(source: "users", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/subscriptions_with_connectors.graphql b/apollo-federation/src/sources/connect/validation/test_data/subscriptions_with_connectors.graphql new file mode 100644 index 0000000000..9506ad1fc9 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/subscriptions_with_connectors.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! + @connect(source: "v2", http: { GET: "/resources" }, selection: "$") +} + +type Subscription { + resourceAdded: String! +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/absolute_connect_url_with_source.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/absolute_connect_url_with_source.graphql new file mode 100644 index 0000000000..e76fc5d5e1 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/absolute_connect_url_with_source.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! 
+ @connect( + source: "v2" + http: { GET: "http://127.0.0.1/resources" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-jsonselection-in-expression.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-jsonselection-in-expression.graphql new file mode 100644 index 0000000000..cbc47441ff --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-jsonselection-in-expression.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! + @connect( + source: "v2" + http: { GET: "/{blah!}" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-path-parameter.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-path-parameter.graphql new file mode 100644 index 0000000000..8dfb31ce68 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid-path-parameter.graphql @@ -0,0 +1,15 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v2", http: { baseURL: "http://127.0.0.1" }) + +type Query { + resources: [String!]! 
+ @connect( + source: "v2" + http: { GET: "/{$blah}" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url.graphql new file mode 100644 index 0000000000..dc3c5cddd9 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url.graphql @@ -0,0 +1,6 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + resources: [String!]! @connect(http: { GET: "127.0.0.1" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url_scheme.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url_scheme.graphql new file mode 100644 index 0000000000..f29d5d2a10 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_connect_url_scheme.graphql @@ -0,0 +1,7 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + resources: [String!]! 
+ @connect(http: { GET: "file://data.json" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_namespace_in_url_template_variables.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_namespace_in_url_template_variables.graphql new file mode 100644 index 0000000000..5af8460751 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_namespace_in_url_template_variables.graphql @@ -0,0 +1,29 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] + ) + +type Query { + unknown(bar: String): String + @connect( + http: { + GET: "http://127.0.0.1/{$foo.bar}" + } + selection: "$" + ) + invalid(bar: String): String + @connect( + http: { + GET: "http://127.0.0.1/{$status.bar}" + } + selection: "$" + ) + nodollar(bar: String): String + @connect( + http: { + GET: "http://127.0.0.1/{config.bar}" + } + selection: "$" + ) +} \ No newline at end of file diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_nested_paths_in_url_template_variables.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_nested_paths_in_url_template_variables.graphql new file mode 100644 index 0000000000..70f771ce35 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_nested_paths_in_url_template_variables.graphql @@ -0,0 +1,40 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + scalar(scalar: String): String + @connect( + http: { GET: "http://127.0.0.1?something={$args.scalar.blah}" } + selection: "$" + ) + object(input: InputObject): Object + @connect( + http: { GET: "http://127.0.0.1?something={$args.input.fieldThatDoesntExist}" } + selection: "id" + ) + enum(enum: Enum): Enum + @connect( + http: { GET: 
"http://127.0.0.1?something={$args.enum.cantHaveFields}" } + selection: "$" + ) +} + +type Object { + id: ID! + newField: String + @connect( + http: { GET: "http://127.0.0.1?something={$this.fieldThatDoesntExist}" } + selection: "$" + ) +} + +input InputObject { + id: ID! +} + +enum Enum { + VALUE +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url.graphql new file mode 100644 index 0000000000..89eb02ad98 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v1", http: { baseURL: "127.0.0.1" }) + +type Query { + resources: [String!]! + @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url_scheme.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url_scheme.graphql new file mode 100644 index 0000000000..7c63d06a51 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_source_url_scheme.graphql @@ -0,0 +1,11 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source(name: "v1", http: { baseURL: "file://data.json" }) + +type Query { + resources: [String!]! 
+ @connect(source: "v1", http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_types.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_types.graphql new file mode 100644 index 0000000000..fb5a014fda --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/invalid_types.graphql @@ -0,0 +1,42 @@ +extend schema +@link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect"] +) + +type Query { + argIsArray(val: [String]): String + @connect( + http: { GET: "http://127.0.0.1/{$args.val}" } + selection: "$" + ) + + argIsObject(val: Input): String + @connect( + http: { GET: "http://127.0.0.1/{$args.val}" } + selection: "$" + ) +} + +type This { + anArray: [String] @external + thisIsArray: String + @connect( + http: { GET: "http://127.0.0.1/{$this.anArray}" } + selection: "$" + ) + anObject: Object @external + requiresAnObject: String + @connect( + http: { GET: "http://127.0.0.1?obj={$this.anObject}" } + selection: "$" + ) +} + +input Input { + val: String +} + +type Object { + stuff: String @external +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/relative_connect_url_without_source.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/relative_connect_url_without_source.graphql new file mode 100644 index 0000000000..b6c9d1f4cb --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/relative_connect_url_without_source.graphql @@ -0,0 +1,6 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + resources: [String!]! 
@connect(http: { GET: "/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/this_on_root_types.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/this_on_root_types.graphql new file mode 100644 index 0000000000..7fe9c2cb3e --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/this_on_root_types.graphql @@ -0,0 +1,23 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + static: String @connect(http: { GET: "http://127.0.0.1/static" }, selection: "$") + requiresThis: String + @connect( + http: { GET: "http://127.0.0.1/{$this.static}"} + selection: "$" + ) +} + +type Mutation { + static: String @connect(http: { GET: "http://127.0.0.1/static" }, selection: "$") + requiresThis: String + @connect( + http: { GET: "http://127.0.0.1/{$this.static}"} + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_arg_in_url_template.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_arg_in_url_template.graphql new file mode 100644 index 0000000000..a4fba63a93 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_arg_in_url_template.graphql @@ -0,0 +1,13 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + resources: [String!]! 
+ @connect( + http: { GET: "http://127.0.0.1/{$args.blah}?something={$args.something}" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_this_in_url_template.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_this_in_url_template.graphql new file mode 100644 index 0000000000..cd384decde --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/undefined_this_in_url_template.graphql @@ -0,0 +1,22 @@ +extend schema + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + +type Query { + things: [Something] + @connect( + http: { GET: "http://127.0.0.1/somethings"} + selection: "id" + ) +} + +type Something { + id: ID! + resources: [String!]! + @connect( + http: { GET: "http://127.0.0.1/{$this.blah}?something={$this.something}" } + selection: "$" + ) +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/uri_templates/valid_connect_absolute_url.graphql b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/valid_connect_absolute_url.graphql new file mode 100644 index 0000000000..d4e8ab90ab --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/uri_templates/valid_connect_absolute_url.graphql @@ -0,0 +1,7 @@ +extend schema + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + resources: [String!]! 
+ @connect(http: { GET: "http://127.0.0.1:8000/resources" }, selection: "$") +} diff --git a/apollo-federation/src/sources/connect/validation/test_data/valid_selection_with_escapes.graphql b/apollo-federation/src/sources/connect/validation/test_data/valid_selection_with_escapes.graphql new file mode 100644 index 0000000000..979d10cf64 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/test_data/valid_selection_with_escapes.graphql @@ -0,0 +1,30 @@ +extend schema +@link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect"]) + +type Query { + block: T + @connect( + http: { GET: "http://127.0.0.1/something" } + selection: """ + + + one + two + + + + unicode:$('﷽é€中π'). + """ + ) + standard: T + @connect( + http: { GET: "http://127.0.0.1/something" } + selection: "\n\n\none two\t\t\t\n\n\nunicode:$('\uFDFD\u0065\u0301\u20AC\u4E2D\u03C0')." + ) +} + +type T { + one: String + two: String + unicode: String +} diff --git a/apollo-federation/src/sources/connect/validation/variable/mod.rs b/apollo-federation/src/sources/connect/validation/variable/mod.rs new file mode 100644 index 0000000000..ee2d885410 --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/variable/mod.rs @@ -0,0 +1,77 @@ +//! Variable validation. 
+mod resolver; + +use std::collections::HashMap; + +use itertools::Itertools; +use resolver::args::ArgsResolver; +use resolver::this::ThisResolver; +use resolver::NamespaceResolver; + +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::variable::Namespace; +use crate::sources::connect::variable::Target; +use crate::sources::connect::variable::VariableContext; +use crate::sources::connect::variable::VariableReference; + +pub(crate) struct VariableResolver<'a> { + context: VariableContext<'a>, + schema: &'a SchemaInfo<'a>, + resolvers: HashMap<Namespace, Box<dyn NamespaceResolver>>, +} + +impl<'a> VariableResolver<'a> { + pub(super) fn new(context: VariableContext<'a>, schema: &'a SchemaInfo<'a>) -> Self { + let mut resolvers = HashMap::<Namespace, Box<dyn NamespaceResolver>>::new(); + resolvers.insert( + Namespace::This, + Box::new(ThisResolver::new(context.object, context.field)), + ); + resolvers.insert(Namespace::Args, Box::new(ArgsResolver::new(context.field))); + Self { + context, + schema, + resolvers, + } + } + + pub(super) fn resolve( + &self, + reference: &VariableReference<Namespace>, + expression: GraphQLString, + ) -> Result<(), Message> { + if !self + .context + .available_namespaces() + .contains(&reference.namespace.namespace) + { + return Err(Message { + code: self.error_code(), + message: format!( + "variable `{namespace}` is not valid at this location, must be one of {available}", + namespace = reference.namespace.namespace.as_str(), + available = self.context.namespaces_joined(), + ), + locations: expression.line_col_for_subslice( + reference.namespace.location.start..reference.namespace.location.end, + self.schema + ).into_iter().collect(), + }); + } + if let Some(resolver) = self.resolvers.get(&reference.namespace.namespace) { + resolver.check(reference, expression, self.schema)?; + } + Ok(()) + } + + fn error_code(&self) -> 
Code { + match self.context.target { + Target::Url => Code::InvalidUrl, + Target::Header => Code::InvalidHeader, + Target::Body => Code::InvalidJsonSelection, + } + } +} diff --git a/apollo-federation/src/sources/connect/validation/variable/resolver/args.rs b/apollo-federation/src/sources/connect/validation/variable/resolver/args.rs new file mode 100644 index 0000000000..57a3dea6cd --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/variable/resolver/args.rs @@ -0,0 +1,58 @@ +use std::format; + +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::schema::Component; + +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::variable::resolver; +use crate::sources::connect::validation::variable::resolver::NamespaceResolver; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::variable::Namespace; +use crate::sources::connect::variable::VariableReference; + +/// Resolves variables in the `$args` namespace +pub(crate) struct ArgsResolver<'a> { + field: &'a Component<FieldDefinition>, +} + +impl<'a> ArgsResolver<'a> { + pub(crate) fn new(field: &'a Component<FieldDefinition>) -> Self { + Self { field } + } +} + +impl NamespaceResolver for ArgsResolver<'_> { + fn check( + &self, + reference: &VariableReference<Namespace>, + expression: GraphQLString, + schema: &SchemaInfo, + ) -> Result<(), Message> { + let Some(root) = resolver::get_root(reference) else { + return Ok(()); // Not something we can type check this way TODO: delete all of this when Shape is available + }; + + let field_type = self + .field + .arguments + .iter() + .find(|arg| arg.name == root.as_str()) + .ok_or_else(|| Message { + code: Code::UndefinedArgument, + message: format!( + "`{object}` does not have an argument named `{root}`", + object = self.field.name, + root = root.as_str(), + ), + locations: expression + 
.line_col_for_subslice(root.location.start..root.location.end, schema) + .into_iter() + .collect(), + }) + .map(|field| field.ty.clone())?; + + resolver::resolve_path(schema, reference, expression, &field_type, self.field) + } +} diff --git a/apollo-federation/src/sources/connect/validation/variable/resolver/mod.rs b/apollo-federation/src/sources/connect/validation/variable/resolver/mod.rs new file mode 100644 index 0000000000..1899c7f19f --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/variable/resolver/mod.rs @@ -0,0 +1,102 @@ +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::ast::Type; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::ExtendedType; +use apollo_compiler::Schema; + +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::variable::Namespace; +use crate::sources::connect::variable::VariablePathPart; +use crate::sources::connect::variable::VariableReference; + +pub(super) mod args; +pub(super) mod this; + +/// Checks that the variables are valid within a specific namespace +pub(crate) trait NamespaceResolver { + fn check( + &self, + reference: &VariableReference, + expression: GraphQLString, + schema: &SchemaInfo, + ) -> Result<(), Message>; +} + +pub(super) fn resolve_type<'schema>( + schema: &'schema Schema, + ty: &Type, + field: &Component, +) -> Result<&'schema ExtendedType, Message> { + schema + .types + .get(ty.inner_named_type()) + .ok_or_else(|| Message { + code: Code::GraphQLError, + message: format!("The type {ty} is referenced but not defined in the schema.",), + locations: field + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }) +} + +/// Resolve a variable reference path relative to a type. 
Assumes that the first element of the +/// path has already been resolved to the type, and validates any remainder. +fn resolve_path( + schema: &SchemaInfo, + reference: &VariableReference, + expression: GraphQLString, + field_type: &Type, + field: &Component, +) -> Result<(), Message> { + let mut variable_type = field_type.clone(); + for nested_field_name in reference.path.clone().iter().skip(1) { + let path_component_range = nested_field_name.location.clone(); + let nested_field_name = nested_field_name.as_str(); + let parent_is_nullable = !field_type.is_non_null(); + variable_type = resolve_type(schema, &variable_type, field) + .and_then(|extended_type| { + match extended_type { + ExtendedType::Enum(_) | ExtendedType::Scalar(_) => None, + ExtendedType::Object(object) => object.fields.get(nested_field_name).map(|field| &field.ty), + ExtendedType::InputObject(input_object) => input_object.fields.get(nested_field_name).map(|field| field.ty.as_ref()), + // TODO: at the time of writing, you can't declare interfaces or unions in connectors schemas at all, so these aren't tested + ExtendedType::Interface(interface) => interface.fields.get(nested_field_name).map(|field| &field.ty), + ExtendedType::Union(_) => { + return Err(Message { + code: Code::UnsupportedVariableType, + message: format!( + "The type {field_type} is a union, which is not supported in variables yet.", + ), + locations: field + .line_column_range(&schema.sources) + .into_iter() + .collect(), + }) + }, + } + .ok_or_else(|| Message { + code: Code::UndefinedField, + message: format!( + "`{variable_type}` does not have a field named `{nested_field_name}`." 
+ ), + locations: expression.line_col_for_subslice( + path_component_range.start..path_component_range.end, + schema + ).into_iter().collect(), + }) + })?.clone(); + if parent_is_nullable && variable_type.is_non_null() { + variable_type = variable_type.nullable(); + } + } + Ok(()) +} + +/// Require a variable reference to have a path +fn get_root<'a>(reference: &'a VariableReference<'a, Namespace>) -> Option<VariablePathPart<'a>> { + reference.path.first().cloned() +} diff --git a/apollo-federation/src/sources/connect/validation/variable/resolver/this.rs b/apollo-federation/src/sources/connect/validation/variable/resolver/this.rs new file mode 100644 index 0000000000..affaa178ae --- /dev/null +++ b/apollo-federation/src/sources/connect/validation/variable/resolver/this.rs @@ -0,0 +1,57 @@ +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Node; + +use crate::sources::connect::validation::graphql::GraphQLString; +use crate::sources::connect::validation::graphql::SchemaInfo; +use crate::sources::connect::validation::variable::resolver; +use crate::sources::connect::validation::variable::resolver::NamespaceResolver; +use crate::sources::connect::validation::Code; +use crate::sources::connect::validation::Message; +use crate::sources::connect::variable::Namespace; +use crate::sources::connect::variable::VariableReference; + +/// Resolves variables in the `$this` namespace +pub(crate) struct ThisResolver<'a> { + object: &'a Node<ObjectType>, + field: &'a Component<FieldDefinition>, +} + +impl<'a> ThisResolver<'a> { + pub(crate) fn new(object: &'a Node<ObjectType>, field: &'a Component<FieldDefinition>) -> Self { + Self { object, field } + } +} + +impl NamespaceResolver for ThisResolver<'_> { + fn check( + &self, + reference: &VariableReference<Namespace>, + expression: GraphQLString, + schema: &SchemaInfo, + ) -> Result<(), Message> { + let Some(root) = resolver::get_root(reference) else { + return Ok(()); // Not something we can type check this way + }; + let 
field_type = self + .object + .fields + .get(root.as_str()) + .ok_or_else(|| Message { + code: Code::UndefinedField, + message: format!( + "`{object}` does not have a field named `{root}`", + object = self.object.name, + root = root.as_str(), + ), + locations: expression + .line_col_for_subslice(root.location.start..root.location.end, schema) + .into_iter() + .collect(), + }) + .map(|field| field.ty.clone())?; + + resolver::resolve_path(schema, reference, expression, &field_type, self.field) + } +} diff --git a/apollo-federation/src/sources/connect/variable.rs b/apollo-federation/src/sources/connect/variable.rs new file mode 100644 index 0000000000..9c7c684cc8 --- /dev/null +++ b/apollo-federation/src/sources/connect/variable.rs @@ -0,0 +1,199 @@ +//! Variables used in connector directives `@connect` and `@source`. + +use std::fmt::Display; +use std::fmt::Formatter; +use std::ops::Range; +use std::str::FromStr; + +use apollo_compiler::ast::FieldDefinition; +use apollo_compiler::schema::Component; +use apollo_compiler::schema::ObjectType; +use apollo_compiler::Node; +use itertools::Itertools; + +/// A variable context for Apollo Connectors. Variables are used within a `@connect` or `@source` +/// [`Directive`], are used in a particular [`Phase`], and have a specific [`Target`]. 
+#[derive(Clone, PartialEq)] +pub(crate) struct VariableContext<'schema> { + /// The object type containing the field the directive is on + pub(crate) object: &'schema Node<ObjectType>, + + /// The field definition of the field the directive is on + pub(crate) field: &'schema Component<FieldDefinition>, + pub(super) phase: Phase, + pub(super) target: Target, +} + +impl<'schema> VariableContext<'schema> { + pub(crate) fn new( + object: &'schema Node<ObjectType>, + field: &'schema Component<FieldDefinition>, + phase: Phase, + target: Target, + ) -> Self { + Self { + object, + field, + phase, + target, + } + } + + /// Get the variable namespaces that are available in this context + pub(crate) fn available_namespaces(&self) -> impl Iterator<Item = Namespace> { + match &self.phase { + Phase::Response => { + vec![ + Namespace::Args, + Namespace::Config, + Namespace::Context, + Namespace::Status, + Namespace::This, + ] + } + Phase::Request => { + vec![ + Namespace::Config, + Namespace::Context, + Namespace::This, + Namespace::Args, + ] + } + } + .into_iter() + } + + /// Get the list of namespaces joined as a comma separated list + pub(crate) fn namespaces_joined(&self) -> String { + self.available_namespaces() + .map(|s| s.to_string()) + .sorted() + .join(", ") + } +} + +/// The phase an expression is associated with +#[derive(Clone, Copy, PartialEq)] +pub(crate) enum Phase { + /// The request phase + Request, + + /// The response phase + Response, +} + +/// The target of an expression containing a variable reference +#[allow(unused)] +#[derive(Clone, Copy, PartialEq)] +pub(crate) enum Target { + /// The expression is used in an HTTP header + Header, + + /// The expression is used in a URL + Url, + + /// The expression is used in the body of a request or response + Body, +} + +/// The variable namespaces defined for Apollo Connectors +#[derive(PartialEq, Eq, Clone, Copy, Hash)] +pub enum Namespace { + Args, + Config, + Context, + Status, + This, +} + +impl Namespace { + pub fn as_str(&self) -> &'static str { + match self { + Self::Args => 
"$args", + Self::Config => "$config", + Self::Context => "$context", + Self::Status => "$status", + Self::This => "$this", + } + } +} + +impl FromStr for Namespace { + type Err = (); + + fn from_str(s: &str) -> Result<Self, Self::Err> { + match s { + "$args" => Ok(Self::Args), + "$config" => Ok(Self::Config), + "$context" => Ok(Self::Context), + "$status" => Ok(Self::Status), + "$this" => Ok(Self::This), + _ => Err(()), + } + } +} + +impl std::fmt::Debug for Namespace { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +impl Display for Namespace { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.as_str()) + } +} + +/// A variable reference. Consists of a namespace starting with a `$` and an optional path +/// separated by '.' characters. +#[derive(Debug, Eq, PartialEq, Clone, Hash)] +pub(crate) struct VariableReference<'a, N: FromStr + ToString> { + /// The namespace of the variable - `$this`, `$args`, `$status`, etc. + pub(crate) namespace: VariableNamespace<N>, + + /// The path elements of this reference. For example, the reference `$this.a.b.c` + /// has path elements `a`, `b`, `c`. May be empty in some cases, as in the reference `$status`. + pub(crate) path: Vec<VariablePathPart<'a>>, + + /// The location of the reference within the original text. 
+ pub(crate) location: Range<usize>, +} + +impl<N: FromStr + ToString> Display for VariableReference<'_, N> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(self.namespace.namespace.to_string().as_str())?; + for part in &self.path { + f.write_str(".")?; + f.write_str(part.as_str())?; + } + Ok(()) + } +} + +/// A namespace in a variable reference, like `$this` in `$this.a.b.c` +#[derive(Debug, Eq, PartialEq, Clone, Hash)] +pub(crate) struct VariableNamespace<N: FromStr + ToString> { + pub(crate) namespace: N, + pub(crate) location: Range<usize>, +} + +/// Part of a variable path, like `a` in `$this.a.b.c` +#[derive(Debug, Eq, PartialEq, Clone, Hash)] +pub(crate) struct VariablePathPart<'a> { + pub(crate) part: &'a str, + pub(crate) location: Range<usize>, +} + +impl VariablePathPart<'_> { + pub(crate) fn as_str(&self) -> &str { + self.part + } +} + +impl Display for VariablePathPart<'_> { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + f.write_str(self.part.to_string().as_str())?; + Ok(()) + } +} diff --git a/apollo-federation/src/sources/mod.rs b/apollo-federation/src/sources/mod.rs index eb7f65c88d..be6843511f 100644 --- a/apollo-federation/src/sources/mod.rs +++ b/apollo-federation/src/sources/mod.rs @@ -1 +1 @@ -mod connect; +pub mod connect; diff --git a/apollo-federation/src/supergraph/join_directive.rs b/apollo-federation/src/supergraph/join_directive.rs new file mode 100644 index 0000000000..0c3bd07e7e --- /dev/null +++ b/apollo-federation/src/supergraph/join_directive.rs @@ -0,0 +1,230 @@ +//! 
@join__directive extraction +use std::sync::Arc; + +use apollo_compiler::ast::Argument; +use apollo_compiler::ast::Directive; +use apollo_compiler::collections::IndexMap; +use apollo_compiler::schema::Component; +use apollo_compiler::Name; +use apollo_compiler::Node; +use itertools::Itertools; + +use super::get_subgraph; +use super::subgraph::FederationSubgraphs; +use crate::error::FederationError; +use crate::link::DEFAULT_LINK_NAME; +use crate::schema::position::ObjectFieldDefinitionPosition; +use crate::schema::position::TypeDefinitionPosition; +use crate::schema::FederationSchema; +use crate::sources::connect::ConnectSpec; + +static JOIN_DIRECTIVE: &str = "join__directive"; + +/// Converts `@join__directive(graphs: [A], name: "foo")` to `@foo` in the A subgraph. +/// If the directive is a link directive on the schema definition, we also need +/// to update the metadata and add the imported definitions. +pub(super) fn extract( + supergraph_schema: &FederationSchema, + subgraphs: &mut FederationSubgraphs, + graph_enum_value_name_to_subgraph_name: &IndexMap>, +) -> Result<(), FederationError> { + let join_directives = match supergraph_schema + .referencers() + .get_directive(JOIN_DIRECTIVE) + { + Ok(directives) => directives, + Err(_) => { + // No join directives found, nothing to do. + return Ok(()); + } + }; + + if let Some(schema_def_pos) = &join_directives.schema { + let schema_def = schema_def_pos.get(supergraph_schema.schema()); + let directives = schema_def + .directives + .iter() + .filter_map(|d| { + if d.name == JOIN_DIRECTIVE { + Some(to_real_directive(d)) + } else { + None + } + }) + .collect_vec(); + + // TODO: Do we need to handle the link directive being renamed? + let (links, others) = directives + .into_iter() + .partition::, _>(|(d, _)| d.name == DEFAULT_LINK_NAME); + + // After adding links, we'll check the link against a safelist of + // specs and check_or_add the spec definitions if necessary. 
+ for (link_directive, subgraph_enum_values) in links { + for subgraph_enum_value in subgraph_enum_values { + let subgraph = get_subgraph( + subgraphs, + graph_enum_value_name_to_subgraph_name, + &subgraph_enum_value, + )?; + + schema_def_pos.insert_directive( + &mut subgraph.schema, + Component::new(link_directive.clone()), + )?; + + if ConnectSpec::from_directive(&link_directive)?.is_some() { + ConnectSpec::check_or_add(&mut subgraph.schema)?; + } + } + } + + // Other directives are added normally. + for (directive, subgraph_enum_values) in others { + for subgraph_enum_value in subgraph_enum_values { + let subgraph = get_subgraph( + subgraphs, + graph_enum_value_name_to_subgraph_name, + &subgraph_enum_value, + )?; + + schema_def_pos + .insert_directive(&mut subgraph.schema, Component::new(directive.clone()))?; + } + } + } + + for object_field_pos in &join_directives.object_fields { + let object_field = object_field_pos.get(supergraph_schema.schema())?; + let directives = object_field + .directives + .iter() + .filter_map(|d| { + if d.name == JOIN_DIRECTIVE { + Some(to_real_directive(d)) + } else { + None + } + }) + .collect_vec(); + + for (directive, subgraph_enum_values) in directives { + for subgraph_enum_value in subgraph_enum_values { + let subgraph = get_subgraph( + subgraphs, + graph_enum_value_name_to_subgraph_name, + &subgraph_enum_value, + )?; + + object_field_pos + .insert_directive(&mut subgraph.schema, Node::new(directive.clone()))?; + } + } + } + + for intf_field_pos in &join_directives.interface_fields { + let intf_field = intf_field_pos.get(supergraph_schema.schema())?; + let directives = intf_field + .directives + .iter() + .filter_map(|d| { + if d.name == JOIN_DIRECTIVE { + Some(to_real_directive(d)) + } else { + None + } + }) + .collect_vec(); + + for (directive, subgraph_enum_values) in directives { + for subgraph_enum_value in subgraph_enum_values { + let subgraph = get_subgraph( + subgraphs, + graph_enum_value_name_to_subgraph_name, + 
&subgraph_enum_value, + )?; + + if subgraph + .schema + .try_get_type(intf_field_pos.type_name.clone()) + .map(|t| matches!(t, TypeDefinitionPosition::Interface(_))) + .unwrap_or_default() + { + intf_field_pos + .insert_directive(&mut subgraph.schema, Node::new(directive.clone()))?; + } else { + // In the subgraph it's defined as an object with @interfaceObject + let object_field_pos = ObjectFieldDefinitionPosition { + type_name: intf_field_pos.type_name.clone(), + field_name: intf_field_pos.field_name.clone(), + }; + object_field_pos + .insert_directive(&mut subgraph.schema, Node::new(directive.clone()))?; + } + } + } + } + + // TODO + // - join_directives.directive_arguments + // - join_directives.enum_types + // - join_directives.enum_values + // - join_directives.input_object_fields + // - join_directives.input_object_types + // - join_directives.interface_field_arguments + // - join_directives.interface_types + // - join_directives.object_field_arguments + // - join_directives.object_types + // - join_directives.scalar_types + // - join_directives.union_types + + Ok(()) +} + +fn to_real_directive(directive: &Node) -> (Directive, Vec) { + let subgraph_enum_values = directive + .specified_argument_by_name("graphs") + .and_then(|arg| arg.as_list()) + .map(|list| { + list.iter() + .map(|node| { + Name::new( + node.as_enum() + .expect("join__directive(graphs:) value is an enum") + .as_str(), + ) + .expect("join__directive(graphs:) value is a valid name") + }) + .collect() + }) + .expect("join__directive(graphs:) missing"); + + let name = directive + .specified_argument_by_name("name") + .expect("join__directive(name:) is present") + .as_str() + .expect("join__directive(name:) is a string"); + + let arguments = directive + .specified_argument_by_name("args") + .and_then(|a| a.as_object()) + .map(|args| { + args.iter() + .map(|(k, v)| { + Argument { + name: k.clone(), + value: v.clone(), + } + .into() + }) + .collect() + }) + .unwrap_or_default(); + + let directive = 
Directive { + name: Name::new(name).expect("join__directive(name:) is a valid name"), + arguments, + }; + + (directive, subgraph_enum_values) +} diff --git a/apollo-federation/src/supergraph/mod.rs b/apollo-federation/src/supergraph/mod.rs index d44978db9d..758f5690f2 100644 --- a/apollo-federation/src/supergraph/mod.rs +++ b/apollo-federation/src/supergraph/mod.rs @@ -1,3 +1,4 @@ +mod join_directive; mod schema; mod subgraph; @@ -7,8 +8,6 @@ use std::ops::Not; use std::sync::Arc; use std::sync::LazyLock; -use apollo_compiler::ast::Argument; -use apollo_compiler::ast::Directive; use apollo_compiler::ast::FieldDefinition; use apollo_compiler::collections::IndexMap; use apollo_compiler::collections::IndexSet; @@ -58,7 +57,6 @@ use crate::link::join_spec_definition::TypeDirectiveArguments; use crate::link::spec::Identity; use crate::link::spec::Version; use crate::link::spec_definition::SpecDefinition; -use crate::link::DEFAULT_LINK_NAME; use crate::schema::field_set::parse_field_set_without_normalization; use crate::schema::position::is_graphql_reserved_name; use crate::schema::position::CompositeTypeDefinitionPosition; @@ -318,7 +316,7 @@ fn extract_subgraphs_from_fed_2_supergraph( &input_object_types, )?; - extract_join_directives( + join_directive::extract( supergraph_schema, subgraphs, graph_enum_value_name_to_subgraph_name, @@ -2109,175 +2107,6 @@ fn maybe_dump_subgraph_schema(subgraph: FederationSubgraph, message: &mut String }; } -//////////////////////////////////////////////////////////////////////////////// -/// @join__directive extraction -static JOIN_DIRECTIVE: &str = "join__directive"; - -/// Converts `@join__directive(graphs: [A], name: "foo")` to `@foo` in the A subgraph. -/// If the directive is a link directive on the schema definition, we also need -/// to update the metadata and add the imported definitions. 
-fn extract_join_directives( - supergraph_schema: &FederationSchema, - subgraphs: &mut FederationSubgraphs, - graph_enum_value_name_to_subgraph_name: &IndexMap>, -) -> Result<(), FederationError> { - let join_directives = match supergraph_schema - .referencers() - .get_directive(JOIN_DIRECTIVE) - { - Ok(directives) => directives, - Err(_) => { - // No join directives found, nothing to do. - return Ok(()); - } - }; - - if let Some(schema_def_pos) = &join_directives.schema { - let schema_def = schema_def_pos.get(supergraph_schema.schema()); - let directives = schema_def - .directives - .iter() - .filter_map(|d| { - if d.name == JOIN_DIRECTIVE { - Some(join_directive_to_real_directive(d)) - } else { - None - } - }) - .collect_vec(); - - // TODO: Do we need to handle the link directive being renamed? - let (links, others) = directives - .into_iter() - .partition::, _>(|(d, _)| d.name == DEFAULT_LINK_NAME); - - // After adding links, we'll check the link against a safelist of - // specs and check_or_add the spec definitions if necessary. - for (link_directive, subgraph_enum_values) in links { - for subgraph_enum_value in subgraph_enum_values { - let subgraph = get_subgraph( - subgraphs, - graph_enum_value_name_to_subgraph_name, - &subgraph_enum_value, - )?; - - schema_def_pos.insert_directive( - &mut subgraph.schema, - Component::new(link_directive.clone()), - )?; - - // TODO: add imported definitions from relevant specs - } - } - - // Other directives are added normally. 
- for (directive, subgraph_enum_values) in others { - for subgraph_enum_value in subgraph_enum_values { - let subgraph = get_subgraph( - subgraphs, - graph_enum_value_name_to_subgraph_name, - &subgraph_enum_value, - )?; - - schema_def_pos - .insert_directive(&mut subgraph.schema, Component::new(directive.clone()))?; - } - } - } - - for object_field_pos in &join_directives.object_fields { - let object_field = object_field_pos.get(supergraph_schema.schema())?; - let directives = object_field - .directives - .iter() - .filter_map(|d| { - if d.name == JOIN_DIRECTIVE { - Some(join_directive_to_real_directive(d)) - } else { - None - } - }) - .collect_vec(); - - for (directive, subgraph_enum_values) in directives { - for subgraph_enum_value in subgraph_enum_values { - let subgraph = get_subgraph( - subgraphs, - graph_enum_value_name_to_subgraph_name, - &subgraph_enum_value, - )?; - - object_field_pos - .insert_directive(&mut subgraph.schema, Node::new(directive.clone()))?; - } - } - } - - // TODO - // - join_directives.directive_arguments - // - join_directives.enum_types - // - join_directives.enum_values - // - join_directives.input_object_fields - // - join_directives.input_object_types - // - join_directives.interface_field_arguments - // - join_directives.interface_fields - // - join_directives.interface_types - // - join_directives.object_field_arguments - // - join_directives.object_types - // - join_directives.scalar_types - // - join_directives.union_types - - Ok(()) -} - -fn join_directive_to_real_directive(directive: &Node) -> (Directive, Vec) { - let subgraph_enum_values = directive - .specified_argument_by_name("graphs") - .and_then(|arg| arg.as_list()) - .map(|list| { - list.iter() - .map(|node| { - Name::new( - node.as_enum() - .expect("join__directive(graphs:) value is an enum") - .as_str(), - ) - .expect("join__directive(graphs:) value is a valid name") - }) - .collect() - }) - .expect("join__directive(graphs:) missing"); - - let name = directive - 
.specified_argument_by_name("name") - .expect("join__directive(name:) is present") - .as_str() - .expect("join__directive(name:) is a string"); - - let arguments = directive - .specified_argument_by_name("args") - .and_then(|a| a.as_object()) - .map(|args| { - args.iter() - .map(|(k, v)| { - Argument { - name: k.clone(), - value: v.clone(), - } - .into() - }) - .collect() - }) - .unwrap_or_default(); - - let directive = Directive { - name: Name::new(name).expect("join__directive(name:) is a valid name"), - arguments, - }; - - (directive, subgraph_enum_values) -} - #[cfg(test)] mod tests { use apollo_compiler::name; diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap index e4dcf8530b..f2adc95c5b 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_supergraph.snap @@ -6,6 +6,8 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec query: Query } +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION @@ -28,21 +30,23 @@ enum E @join__type(graph: SUBGRAPH2) { scalar Import @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) type Query @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) { - t: T @join__field(graph: SUBGRAPH1) + t: T @join__field(graph: SUBGRAPH1, type: "T") } type S @join__type(graph: SUBGRAPH1) { - x: Int @join__field(graph: SUBGRAPH1) + x: Int @join__field(graph: SUBGRAPH1, type: "Int") } type T @join__type(graph: SUBGRAPH1, key: "k") @join__type(graph: SUBGRAPH2, key: "k") { - k: ID @join__field(graph: SUBGRAPH1) @join__field(graph: SUBGRAPH2) - a: Int @join__field(graph: SUBGRAPH2) - b: String @join__field(graph: SUBGRAPH2) + k: ID @join__field(graph: SUBGRAPH1, type: "ID") @join__field(graph: SUBGRAPH2, type: "ID") + a: Int @join__field(graph: SUBGRAPH2, type: "Int") + b: String @join__field(graph: SUBGRAPH2, type: "String") } union U @join__type(graph: SUBGRAPH1) @join__unionMember(graph: SUBGRAPH1, member: "S") @join__unionMember(graph: SUBGRAPH1, member: "T") = S | T +scalar join__DirectiveArguments + scalar join__FieldSet enum join__Graph { diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap index dfe41eb6ab..d93147fbf0 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_types_from_different_subgraphs.snap @@ -6,6 +6,8 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec query: Query } +directive 
@join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION @@ -23,19 +25,21 @@ directive @link(url: String, as: String, for: link__Purpose, import: [link__Impo scalar Import @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) type Product @join__type(graph: SUBGRAPHA) { - sku: String! @join__field(graph: SUBGRAPHA) - name: String! @join__field(graph: SUBGRAPHA) + sku: String! @join__field(graph: SUBGRAPHA, type: "String!") + name: String! @join__field(graph: SUBGRAPHA, type: "String!") } type Query @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) { - products: [Product!] @join__field(graph: SUBGRAPHA) + products: [Product!] @join__field(graph: SUBGRAPHA, type: "[Product!]") } type User @join__type(graph: SUBGRAPHB) { - name: String @join__field(graph: SUBGRAPHB) - email: String! @join__field(graph: SUBGRAPHB) + name: String @join__field(graph: SUBGRAPHB, type: "String") + email: String! 
@join__field(graph: SUBGRAPHB, type: "String!") } +scalar join__DirectiveArguments + scalar join__FieldSet enum join__Graph { diff --git a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap index 59ae3be771..ceb2c5deaf 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap +++ b/apollo-federation/tests/snapshots/main__composition_tests__can_compose_with_descriptions.snap @@ -10,6 +10,8 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec """The foo directive description""" directive @foo(url: String) on FIELD +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION @@ -43,9 +45,11 @@ type Query @join__type(graph: SUBGRAPH1) @join__type(graph: SUBGRAPH2) { t( """An argument that is very important""" x: String!, - ): String @join__field(graph: SUBGRAPH1) + ): String @join__field(graph: SUBGRAPH1, type: "String") } +scalar join__DirectiveArguments + scalar join__FieldSet enum join__Graph { diff --git a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap index 443ee630a4..83f91fefc9 100644 --- a/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap +++ 
b/apollo-federation/tests/snapshots/main__composition_tests__compose_removes_federation_directives.snap @@ -6,6 +6,8 @@ schema @link(url: "https://specs.apollo.dev/link/v1.0") @link(url: "https://spec query: Query } +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments!) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, overrideLabel: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION @@ -23,14 +25,16 @@ directive @link(url: String, as: String, for: link__Purpose, import: [link__Impo scalar Import @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) type Product @join__type(graph: SUBGRAPHA, key: "sku") @join__type(graph: SUBGRAPHB, key: "sku") { - sku: String! @join__field(graph: SUBGRAPHA) @join__field(graph: SUBGRAPHB) - name: String! @join__field(graph: SUBGRAPHA, external: true) @join__field(graph: SUBGRAPHB) + sku: String! @join__field(graph: SUBGRAPHA, type: "String!") @join__field(graph: SUBGRAPHB, type: "String!") + name: String! @join__field(graph: SUBGRAPHA, external: true, type: "String!") @join__field(graph: SUBGRAPHB, type: "String!") } type Query @join__type(graph: SUBGRAPHA) @join__type(graph: SUBGRAPHB) { - products: [Product!] @join__field(graph: SUBGRAPHA, provides: "name") + products: [Product!] 
@join__field(graph: SUBGRAPHA, provides: "name", type: "[Product!]") } +scalar join__DirectiveArguments + scalar join__FieldSet enum join__Graph { diff --git a/apollo-router-benchmarks/Cargo.toml b/apollo-router-benchmarks/Cargo.toml index d1e21f2b36..c96ba47a1f 100644 --- a/apollo-router-benchmarks/Cargo.toml +++ b/apollo-router-benchmarks/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-router-benchmarks" -version = "1.59.1" +version = "2.0.0-preview.4" authors = ["Apollo Graph, Inc. "] edition = "2021" license = "Elastic-2.0" diff --git a/apollo-router-scaffold/Cargo.toml b/apollo-router-scaffold/Cargo.toml deleted file mode 100644 index d4a57baa5e..0000000000 --- a/apollo-router-scaffold/Cargo.toml +++ /dev/null @@ -1,26 +0,0 @@ -[package] -name = "apollo-router-scaffold" -version = "1.59.1" -authors = ["Apollo Graph, Inc. "] -edition = "2021" -license = "Elastic-2.0" -publish = false - -[package.metadata.cargo-machete] -ignored = [ - # usage not found because the crate name is `inflector` without `str_` - "str_inflector", -] - -[dependencies] -anyhow = "1.0.80" -clap = { version = "4.5.1", features = ["derive"] } -cargo-scaffold = { version = "0.14.0", default-features = false } -regex = "1" -str_inflector = "0.12.0" -toml = "0.8.10" -[dev-dependencies] -tempfile = "3.10.0" -copy_dir = "0.1.3" -dircmp = "0.2.0" -similar = "2.5.0" diff --git a/apollo-router-scaffold/scaffold-test/.cargo/config b/apollo-router-scaffold/scaffold-test/.cargo/config deleted file mode 100644 index 24a9882b48..0000000000 --- a/apollo-router-scaffold/scaffold-test/.cargo/config +++ /dev/null @@ -1,3 +0,0 @@ -[alias] -xtask = "run --package xtask --" -router = "run --package xtask -- router" diff --git a/apollo-router-scaffold/scaffold-test/.dockerignore b/apollo-router-scaffold/scaffold-test/.dockerignore deleted file mode 100644 index c2c4a5aa95..0000000000 --- a/apollo-router-scaffold/scaffold-test/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -target/** \ No newline at end of file 
diff --git a/apollo-router-scaffold/scaffold-test/.gitignore b/apollo-router-scaffold/scaffold-test/.gitignore deleted file mode 100644 index bba7b53950..0000000000 --- a/apollo-router-scaffold/scaffold-test/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/target/ -/.idea/ diff --git a/apollo-router-scaffold/scaffold-test/Cargo.toml b/apollo-router-scaffold/scaffold-test/Cargo.toml deleted file mode 100644 index 692dc3d08c..0000000000 --- a/apollo-router-scaffold/scaffold-test/Cargo.toml +++ /dev/null @@ -1,25 +0,0 @@ -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[package] -name = "apollo-router-scaffold-test" -version = "0.1.0" -edition = "2021" - -[[bin]] -name = "apollo-router-scaffold-test" -path = "src/main.rs" - -[dependencies] -anyhow = "1.0.58" -apollo-router = { path = "../../apollo-router" } -async-trait = "0.1.52" -schemars = "0.8.10" -serde = "1.0.149" -serde_json = "1.0.79" -tokio = { version = "1.17.0", features = ["full"] } -tower = { version = "0.4.0", features = ["full"] } -tracing = "0.1.37" - -# this makes build scripts and proc macros faster to compile -[profile.dev.build-override] -strip = "debuginfo" -incremental = false diff --git a/apollo-router-scaffold/scaffold-test/Dockerfile b/apollo-router-scaffold/scaffold-test/Dockerfile deleted file mode 100644 index 3d3263130e..0000000000 --- a/apollo-router-scaffold/scaffold-test/Dockerfile +++ /dev/null @@ -1,54 +0,0 @@ -# Use the rust build image from docker as our base -# renovate-automation: rustc version -FROM rust:1.83.0 as build - -# Set our working directory for the build -WORKDIR /usr/src/router - -# Update our build image and install required packages -RUN apt-get update -RUN apt-get -y install \ - npm \ - protobuf-compiler - -# Add rustfmt since build requires it -RUN rustup component add rustfmt - -# Copy the router source to our build environment -COPY . . 
- -# Build and install the custom binary -RUN cargo build --release - -# Make directories for config and schema -RUN mkdir -p /dist/config && \ - mkdir /dist/schema && \ - mv target/release/router /dist - -# Copy configuration for docker image -COPY router.yaml /dist/config.yaml - -FROM debian:bullseye-slim - -RUN apt-get update -RUN apt-get -y install \ - ca-certificates - -# Set labels for our image -LABEL org.opencontainers.image.authors="Apollo Graph, Inc. https://github.com/apollographql/router" -LABEL org.opencontainers.image.source="https://github.com/apollographql/router" - -# Copy in the required files from our build image -COPY --from=build --chown=root:root /dist /dist - -WORKDIR /dist - -ENV APOLLO_ROUTER_CONFIG_PATH="/dist/config.yaml" - -# Make sure we can run the router -RUN chmod 755 /dist/router - -USER router - -# Default executable is the router -ENTRYPOINT ["/dist/router"] diff --git a/apollo-router-scaffold/scaffold-test/README.md b/apollo-router-scaffold/scaffold-test/README.md deleted file mode 100644 index 98ec70eb24..0000000000 --- a/apollo-router-scaffold/scaffold-test/README.md +++ /dev/null @@ -1,120 +0,0 @@ -# Apollo Router project - -This generated project is set up to create a custom Apollo Router binary that may include plugins that you have written. - -> Note: The Apollo Router is made available under the Elastic License v2.0 (ELv2). -> Read [our licensing page](https://www.apollographql.com/docs/resources/elastic-license-v2-faq/) for more details. - -# Compile the router - -To create a debug build use the following command. -```bash -cargo build -``` -Your debug binary is now located in `target/debug/router` - -For production, you will want to create a release build. -```bash -cargo build --release -``` -Your release binary is now located in `target/release/router` - -# Run the Apollo Router - -1. Download the example schema - - ```bash - curl -sSL https://supergraph.demo.starstuff.dev/ > supergraph-schema.graphql - ``` - -2. 
Run the Apollo Router - - During development it is convenient to use `cargo run` to run the Apollo Router as it will - ```bash - cargo run -- --hot-reload --config router.yaml --supergraph supergraph-schema.graphql - ``` - -> If you are using managed federation you can set APOLLO_KEY and APOLLO_GRAPH_REF environment variables instead of specifying the supergraph as a file. - -# Create a plugin - -1. From within your project directory scaffold a new plugin - ```bash - cargo router plugin create hello_world - ``` -2. Select the type of plugin you want to scaffold: - ```bash - Select a plugin template: - > "basic" - "auth" - "tracing" - ``` - - The different templates are: - * basic - a barebones plugin. - * auth - a basic authentication plugin that could make an external call. - * tracing - a plugin that adds a custom span and a log message. - - Choose `basic`. - -4. Add the plugin to the `router.yaml` - ```yaml - plugins: - starstuff.hello_world: - message: "Starting my plugin" - ``` - -5. Run the Apollo Router and see your plugin start up - ```bash - cargo run -- --hot-reload --config router.yaml --supergraph supergraph-schema.graphql - ``` - - In your output you should see something like: - ```bash - 2022-05-21T09:16:33.160288Z INFO router::plugins::hello_world: Starting my plugin - ``` - -# Remove a plugin - -1. From within your project run the following command. It makes a best effort to remove the plugin, but your mileage may vary. - ```bash - cargo router plugin remove hello_world - ``` - -# Docker - -You can use the provided Dockerfile to build a release container. - -Make sure your router is configured to listen to `0.0.0.0` so you can query it from outside the container: - -```yml - supergraph: - listen: 0.0.0.0:4000 -``` - -Use your `APOLLO_KEY` and `APOLLO_GRAPH_REF` environment variables to run the router in managed federation. - - ```bash - docker build -t my_custom_router . 
- docker run -e APOLLO_KEY="your apollo key" -e APOLLO_GRAPH_REF="your apollo graph ref" -p 4000:4000 my_custom_router - ``` - -Otherwise add a `COPY` step to the Dockerfile, and edit the entrypoint: - -```Dockerfile -# Copy configuration for docker image -COPY router.yaml /dist/config.yaml -# Copy supergraph for docker image -COPY my_supergraph.graphql /dist/supergraph.graphql - -# [...] and change the entrypoint - -# Default executable is the router -ENTRYPOINT ["/dist/router", "-s", "/dist/supergraph.graphql"] -``` - -You can now build and run your custom router: - ```bash - docker build -t my_custom_router . - docker run -p 4000:4000 my_custom_router - ``` diff --git a/apollo-router-scaffold/scaffold-test/router.yaml b/apollo-router-scaffold/scaffold-test/router.yaml deleted file mode 100644 index 8411f80a8b..0000000000 --- a/apollo-router-scaffold/scaffold-test/router.yaml +++ /dev/null @@ -1,5 +0,0 @@ -# uncomment this section if you plan to use the Dockerfile -# supergraph: -# listen: 0.0.0.0:4000 -plugins: - # Add plugin configuration here diff --git a/apollo-router-scaffold/scaffold-test/src/main.rs b/apollo-router-scaffold/scaffold-test/src/main.rs deleted file mode 100644 index ca6699afe9..0000000000 --- a/apollo-router-scaffold/scaffold-test/src/main.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod plugins; - -use anyhow::Result; - -fn main() -> Result<()> { - apollo_router::main() -} diff --git a/apollo-router-scaffold/scaffold-test/src/plugins/auth.rs b/apollo-router-scaffold/scaffold-test/src/plugins/auth.rs deleted file mode 100644 index b0da3b45c7..0000000000 --- a/apollo-router-scaffold/scaffold-test/src/plugins/auth.rs +++ /dev/null @@ -1,99 +0,0 @@ -use std::ops::ControlFlow; - -use apollo_router::layers::ServiceBuilderExt; -use apollo_router::plugin::Plugin; -use apollo_router::plugin::PluginInit; -use apollo_router::register_plugin; -use apollo_router::services::supergraph; -use schemars::JsonSchema; -use serde::Deserialize; -use tower::BoxError; -use 
tower::ServiceBuilder; -use tower::ServiceExt; - -#[derive(Debug)] -struct Auth { - #[allow(dead_code)] - configuration: Conf, -} - -#[derive(Debug, Default, Deserialize, JsonSchema)] -struct Conf { - // Put your plugin configuration here. It will automatically be deserialized from JSON. - // Always put some sort of config here, even if it is just a bool to say that the plugin is enabled, - // otherwise the yaml to enable the plugin will be confusing. - message: String, -} -// This plugin is a skeleton for doing authentication that requires a remote call. -#[async_trait::async_trait] -impl Plugin for Auth { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok(Auth { - configuration: init.config, - }) - } - - fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - ServiceBuilder::new() - .oneshot_checkpoint_async(|request: supergraph::Request| async { - // Do some async call here to auth, and decide if to continue or not. - Ok(ControlFlow::Continue(request)) - }) - .service(service) - .boxed() - } -} - -// This macro allows us to use it in our plugin registry! -// register_plugin takes a group name, and a plugin name. 
-register_plugin!("acme", "auth", Auth); - -#[cfg(test)] -mod tests { - use apollo_router::graphql; - use apollo_router::services::supergraph; - use apollo_router::TestHarness; - use tower::BoxError; - use tower::ServiceExt; - - #[tokio::test] - async fn basic_test() -> Result<(), BoxError> { - let test_harness = TestHarness::builder() - .configuration_json(serde_json::json!({ - "plugins": { - "acme.auth": { - "message" : "Starting my plugin" - } - } - })) - .unwrap() - .build_router() - .await - .unwrap(); - let request = supergraph::Request::canned_builder().build().unwrap(); - let mut streamed_response = test_harness.oneshot(request.try_into()?).await?; - - let first_response: graphql::Response = serde_json::from_slice( - streamed_response - .next_response() - .await - .expect("couldn't get primary response")? - .to_vec() - .as_slice(), - ) - .unwrap(); - - assert!(first_response.data.is_some()); - - println!("first response: {:?}", first_response); - let next = streamed_response.next_response().await; - println!("next response: {:?}", next); - - // You could keep calling .next_response() until it yields None if you're expexting more parts. 
- assert!(next.is_none()); - Ok(()) - } -} diff --git a/apollo-router-scaffold/scaffold-test/src/plugins/basic.rs b/apollo-router-scaffold/scaffold-test/src/plugins/basic.rs deleted file mode 100644 index 4f83a15401..0000000000 --- a/apollo-router-scaffold/scaffold-test/src/plugins/basic.rs +++ /dev/null @@ -1,123 +0,0 @@ -use apollo_router::plugin::Plugin; -use apollo_router::plugin::PluginInit; -use apollo_router::register_plugin; -use apollo_router::services::execution; -use apollo_router::services::router; -use apollo_router::services::subgraph; -use apollo_router::services::supergraph; -use schemars::JsonSchema; -use serde::Deserialize; -use tower::BoxError; - -#[derive(Debug)] -struct Basic { - #[allow(dead_code)] - configuration: Conf, -} - -#[derive(Debug, Default, Deserialize, JsonSchema)] -struct Conf { - // Put your plugin configuration here. It will automatically be deserialized from JSON. - // Always put some sort of config here, even if it is just a bool to say that the plugin is enabled, - // otherwise the yaml to enable the plugin will be confusing. - message: String, -} -// This is a bare bones plugin that can be duplicated when creating your own. -#[async_trait::async_trait] -impl Plugin for Basic { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok(Basic { - configuration: init.config, - }) - } - - // Delete this function if you are not customizing it. - fn router_service(&self, service: router::BoxService) -> router::BoxService { - // Always use service builder to compose your plugins. - // It provides off the shelf building blocks for your plugin. - // - // ServiceBuilder::new() - // .service(service) - // .boxed() - - // Returning the original service means that we didn't add any extra functionality at this point in the lifecycle. - service - } - - // Delete this function if you are not customizing it. 
- fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - // Always use service builder to compose your plugins. - // It provides off the shelf building blocks for your plugin. - // - // ServiceBuilder::new() - // .service(service) - // .boxed() - - // Returning the original service means that we didn't add any extra functionality for at this point in the lifecycle. - service - } - - // Delete this function if you are not customizing it. - fn execution_service(&self, service: execution::BoxService) -> execution::BoxService { - service - } - - // Delete this function if you are not customizing it. - fn subgraph_service(&self, _name: &str, service: subgraph::BoxService) -> subgraph::BoxService { - service - } -} - -// This macro allows us to use it in our plugin registry! -// register_plugin takes a group name, and a plugin name. -register_plugin!("acme", "basic", Basic); - -#[cfg(test)] -mod tests { - use apollo_router::graphql; - use apollo_router::services::supergraph; - use apollo_router::TestHarness; - use tower::BoxError; - use tower::ServiceExt; - - #[tokio::test] - async fn basic_test() -> Result<(), BoxError> { - let test_harness = TestHarness::builder() - .configuration_json(serde_json::json!({ - "plugins": { - "acme.basic": { - "message" : "Starting my plugin" - } - } - })) - .unwrap() - .build_router() - .await - .unwrap(); - let request = supergraph::Request::canned_builder().build().unwrap(); - let mut streamed_response = test_harness.oneshot(request.try_into()?).await?; - - let first_response: graphql::Response = serde_json::from_slice( - streamed_response - .next_response() - .await - .expect("couldn't get primary response")? 
- .to_vec() - .as_slice(), - ) - .unwrap(); - - assert!(first_response.data.is_some()); - - println!("first response: {:?}", first_response); - let next = streamed_response.next_response().await; - println!("next response: {:?}", next); - - // You could keep calling .next_response() until it yields None if you're expexting more parts. - assert!(next.is_none()); - Ok(()) - } -} diff --git a/apollo-router-scaffold/scaffold-test/src/plugins/mod.rs b/apollo-router-scaffold/scaffold-test/src/plugins/mod.rs deleted file mode 100644 index 738d9a3f7e..0000000000 --- a/apollo-router-scaffold/scaffold-test/src/plugins/mod.rs +++ /dev/null @@ -1,3 +0,0 @@ -mod auth; -mod basic; -mod tracing; diff --git a/apollo-router-scaffold/scaffold-test/src/plugins/tracing.rs b/apollo-router-scaffold/scaffold-test/src/plugins/tracing.rs deleted file mode 100644 index e62ba30df3..0000000000 --- a/apollo-router-scaffold/scaffold-test/src/plugins/tracing.rs +++ /dev/null @@ -1,103 +0,0 @@ -use apollo_router::layers::ServiceBuilderExt; -use apollo_router::plugin::Plugin; -use apollo_router::plugin::PluginInit; -use apollo_router::register_plugin; -use apollo_router::services::supergraph; -use schemars::JsonSchema; -use serde::Deserialize; -use tower::BoxError; -use tower::ServiceBuilder; -use tower::ServiceExt; - -#[derive(Debug)] -struct Tracing { - #[allow(dead_code)] - configuration: Conf, -} - -#[derive(Debug, Default, Deserialize, JsonSchema)] -struct Conf { - // Put your plugin configuration here. It will automatically be deserialized from JSON. - // Always put some sort of config here, even if it is just a bool to say that the plugin is enabled, - // otherwise the yaml to enable the plugin will be confusing. - message: String, -} -// This plugin adds a span and an error to the logs. 
-#[async_trait::async_trait] -impl Plugin for Tracing { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok(Tracing { - configuration: init.config, - }) - } - - fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - ServiceBuilder::new() - .instrument(|_request| { - // Optionally take information from the request and insert it into the span as attributes - // See https://docs.rs/tracing/latest/tracing/ for more information - tracing::info_span!("my_custom_span") - }) - .map_request(|request| { - // Add a log message, this will appear within the context of the current span - tracing::error!("error detected"); - request - }) - .service(service) - .boxed() - } -} - -// This macro allows us to use it in our plugin registry! -// register_plugin takes a group name, and a plugin name. -register_plugin!("acme", "tracing", Tracing); - -#[cfg(test)] -mod tests { - use apollo_router::graphql; - use apollo_router::services::supergraph; - use apollo_router::TestHarness; - use tower::BoxError; - use tower::ServiceExt; - - #[tokio::test] - async fn basic_test() -> Result<(), BoxError> { - let test_harness = TestHarness::builder() - .configuration_json(serde_json::json!({ - "plugins": { - "acme.tracing": { - "message" : "Starting my plugin" - } - } - })) - .unwrap() - .build_router() - .await - .unwrap(); - let request = supergraph::Request::canned_builder().build().unwrap(); - let mut streamed_response = test_harness.oneshot(request.try_into()?).await?; - - let first_response: graphql::Response = serde_json::from_slice( - streamed_response - .next_response() - .await - .expect("couldn't get primary response")? 
- .to_vec() - .as_slice(), - ) - .unwrap(); - - assert!(first_response.data.is_some()); - - println!("first response: {:?}", first_response); - let next = streamed_response.next_response().await; - println!("next response: {:?}", next); - - // You could keep calling .next_response() until it yields None if you're expexting more parts. - assert!(next.is_none()); - Ok(()) - } -} diff --git a/apollo-router-scaffold/scaffold-test/xtask/Cargo.toml b/apollo-router-scaffold/scaffold-test/xtask/Cargo.toml deleted file mode 100644 index c96b9d0e98..0000000000 --- a/apollo-router-scaffold/scaffold-test/xtask/Cargo.toml +++ /dev/null @@ -1,12 +0,0 @@ -[package] -name = "xtask" -edition = "2021" -publish = false -version = "0.1.0" - -[dependencies] -# This dependency should stay in line with your router version - -apollo-router-scaffold = { git = "https://github.com/apollographql/router.git", tag = "v1.46.0" } -anyhow = "1.0.58" -clap = "4.0.32" diff --git a/apollo-router-scaffold/scaffold-test/xtask/src/main.rs b/apollo-router-scaffold/scaffold-test/xtask/src/main.rs deleted file mode 100644 index 211ded5255..0000000000 --- a/apollo-router-scaffold/scaffold-test/xtask/src/main.rs +++ /dev/null @@ -1,38 +0,0 @@ -use anyhow::Result; -use apollo_router_scaffold::RouterAction; -use clap::Parser; -use clap::Subcommand; - -#[derive(Parser, Debug)] -struct Args { - #[clap(subcommand)] - action: Action, -} - -impl Args { - fn execute(&self) -> Result<()> { - self.action.execute() - } -} - -#[derive(Subcommand, Debug)] -enum Action { - /// Forward to router action - Router { - #[clap(subcommand)] - action: RouterAction, - }, -} - -impl Action { - fn execute(&self) -> Result<()> { - match self { - Action::Router { action } => action.execute(), - } - } -} - -fn main() -> Result<()> { - let args = Args::parse(); - args.execute() -} diff --git a/apollo-router-scaffold/src/lib.rs b/apollo-router-scaffold/src/lib.rs deleted file mode 100644 index dbc76bc877..0000000000 --- 
a/apollo-router-scaffold/src/lib.rs +++ /dev/null @@ -1,204 +0,0 @@ -mod plugin; - -use anyhow::Result; -use clap::Subcommand; - -use crate::plugin::PluginAction; - -#[derive(Subcommand, Debug)] -pub enum RouterAction { - /// Manage plugins - Plugin { - #[clap(subcommand)] - action: PluginAction, - }, -} - -impl RouterAction { - pub fn execute(&self) -> Result<()> { - match self { - RouterAction::Plugin { action } => action.execute(), - } - } -} - -#[cfg(test)] -mod test { - use std::collections::BTreeMap; - use std::env; - use std::path::Path; - use std::path::PathBuf; - use std::path::MAIN_SEPARATOR; - - use anyhow::Result; - use cargo_scaffold::Opts; - use cargo_scaffold::ScaffoldDescription; - use dircmp::Comparison; - use inflector::Inflector; - use similar::ChangeTag; - use similar::TextDiff; - - #[test] - // this test takes a while, I hope the above test name - // let users know they should not worry and wait a bit. - // Hang in there! - // Note that we configure nextest to use all threads for this test as invoking rustc will use all available CPU and cause timing tests to fail. 
- fn test_scaffold() { - let manifest_dir = PathBuf::from(std::env::var_os("CARGO_MANIFEST_DIR").unwrap()); - let repo_root = manifest_dir.parent().unwrap(); - let target_dir = repo_root.join("target"); - assert!(target_dir.exists()); - let temp_dir = tempfile::Builder::new() - .prefix("router_scaffold") - .tempdir() - .unwrap(); - let temp_dir_path = temp_dir.path(); - - let current_dir = env::current_dir().unwrap(); - // Scaffold the main project - let opts = Opts::builder(PathBuf::from("templates").join("base")) - .project_name("temp") - .target_dir(temp_dir_path) - .force(true); - ScaffoldDescription::new(opts) - .unwrap() - .scaffold_with_parameters(BTreeMap::from([( - "integration_test".to_string(), - toml::Value::String( - format!( - "{}{}", - current_dir - .parent() - .expect("current dir cannot be the root") - .to_str() - .expect("current dir must be convertable to string"), - // add / or \ depending on windows or unix - MAIN_SEPARATOR, - ) - // we need to double \ so they don't get interpreted as escape characters in TOML - .replace('\\', "\\\\"), - ), - )])) - .unwrap(); - - // Scaffold one of each type of plugin - scaffold_plugin(¤t_dir, temp_dir_path, "basic").unwrap(); - scaffold_plugin(¤t_dir, temp_dir_path, "auth").unwrap(); - scaffold_plugin(¤t_dir, temp_dir_path, "tracing").unwrap(); - std::fs::write( - temp_dir.path().join("src").join("plugins").join("mod.rs"), - "mod auth;\nmod basic;\nmod tracing;\n", - ) - .unwrap(); - - #[cfg(target_os = "windows")] - let left = ".\\scaffold-test\\"; - #[cfg(not(target_os = "windows"))] - let left = "./scaffold-test/"; - - let cmp = Comparison::default(); - let diff = cmp - .compare(left, temp_dir_path.to_str().unwrap()) - .expect("should compare"); - - let mut found = false; - if !diff.is_empty() { - println!("generated scaffolding project has changed:\n{:#?}", diff); - for file in diff.changed { - println!("file: {file:?}"); - let file = PathBuf::from(file.to_str().unwrap().strip_prefix(left).unwrap()); - - 
// we do not check the Cargo.toml files because they have differences due to import paths and workspace usage - if file == PathBuf::from("Cargo.toml") || file == PathBuf::from("xtask/Cargo.toml") - { - println!("skipping {}", file.to_str().unwrap()); - continue; - } - // we are not dealing with windows line endings - if file == PathBuf::from("src\\plugins\\mod.rs") { - println!("skipping {}", file.to_str().unwrap()); - continue; - } - - found = true; - diff_file(&PathBuf::from("./scaffold-test"), temp_dir_path, &file); - } - if found { - panic!(); - } - } - } - - fn scaffold_plugin(current_dir: &Path, dir_path: &Path, plugin_type: &str) -> Result<()> { - let opts = Opts::builder(PathBuf::from("templates").join("plugin")) - .project_name(plugin_type) - .target_dir(dir_path) - .append(true); - ScaffoldDescription::new(opts)?.scaffold_with_parameters(BTreeMap::from([ - ( - format!("type_{plugin_type}"), - toml::Value::String(plugin_type.to_string()), - ), - ( - "snake_name".to_string(), - toml::Value::String(plugin_type.to_snake_case()), - ), - ( - "pascal_name".to_string(), - toml::Value::String(plugin_type.to_pascal_case()), - ), - ( - "project_name".to_string(), - toml::Value::String("acme".to_string()), - ), - ( - "integration_test".to_string(), - toml::Value::String( - format!( - "{}{}", - current_dir - .parent() - .expect("current dir cannot be the root") - .to_str() - .expect("current dir must be convertable to string"), - // add / or \ depending on windows or unix - MAIN_SEPARATOR, - ) - // we need to double \ so they don't get interpreted as escape characters in TOML - .replace('\\', "\\\\"), - ), - ), - ]))?; - Ok(()) - } - - fn diff_file(left_folder: &Path, right_folder: &Path, file: &Path) { - println!("file changed: {}\n", file.to_str().unwrap()); - let left = std::fs::read_to_string(left_folder.join(file)).unwrap(); - let right = std::fs::read_to_string(right_folder.join(file)).unwrap(); - - let diff = TextDiff::from_lines(&left, &right); - - for change 
in diff.iter_all_changes() { - let sign = match change.tag() { - ChangeTag::Delete => "-", - ChangeTag::Insert => "+", - ChangeTag::Equal => " ", - }; - print!( - "{} {}|\t{}{}", - change - .old_index() - .map(|s| s.to_string()) - .unwrap_or("-".to_string()), - change - .new_index() - .map(|s| s.to_string()) - .unwrap_or("-".to_string()), - sign, - change - ); - } - println!("\n\n"); - } -} diff --git a/apollo-router-scaffold/src/plugin.rs b/apollo-router-scaffold/src/plugin.rs deleted file mode 100644 index 911e2a86c0..0000000000 --- a/apollo-router-scaffold/src/plugin.rs +++ /dev/null @@ -1,177 +0,0 @@ -use std::fs; -use std::path::Path; -use std::path::PathBuf; - -use anyhow::Result; -use cargo_scaffold::ScaffoldDescription; -use clap::Subcommand; -use inflector::Inflector; -use regex::Regex; -use toml::Value; - -#[derive(Subcommand, Debug)] -pub enum PluginAction { - /// Add a plugin. - Create { - /// The name of the plugin you want to add. - name: String, - - /// Optional override of the scaffold template path. - #[clap(long)] - template_override: Option, - }, - - /// Remove a plugin. - Remove { - /// The name of the plugin you want to remove. 
- name: String, - }, -} - -impl PluginAction { - pub fn execute(&self) -> Result<()> { - match self { - PluginAction::Create { - name, - template_override, - } => create_plugin(name, template_override), - PluginAction::Remove { name } => remove_plugin(name), - } - } -} - -fn create_plugin(name: &str, template_path: &Option) -> Result<()> { - let plugin_path = plugin_path(name); - if plugin_path.exists() { - return Err(anyhow::anyhow!("plugin '{}' already exists", name)); - } - - let cargo_toml = fs::read_to_string("Cargo.toml")?.parse::()?; - let project_name = cargo_toml - .get("package") - .unwrap_or(&toml::Value::String("default".to_string())) - .get("name") - .map(|n| n.to_string().to_snake_case()) - .unwrap_or_else(|| "default".to_string()); - - let version = get_router_version(cargo_toml); - - let opts = cargo_scaffold::Opts::builder(template_path.as_ref().unwrap_or(&PathBuf::from( - "https://github.com/apollographql/router.git", - ))) - .git_ref(version) - .repository_template_path( - PathBuf::from("apollo-router-scaffold") - .join("templates") - .join("plugin"), - ) - .target_dir(".") - .project_name(name) - .parameters(vec![format!("name={name}")]) - .append(true); - let desc = ScaffoldDescription::new(opts)?; - let mut params = desc.fetch_parameters_value()?; - params.insert( - "pascal_name".to_string(), - Value::String(name.to_pascal_case()), - ); - params.insert( - "snake_name".to_string(), - Value::String(name.to_snake_case()), - ); - params.insert( - "project_name".to_string(), - Value::String(project_name.to_snake_case()), - ); - - params.insert( - format!( - "type_{}", - params - .get("type") - .expect("type must have been set") - .as_str() - .expect("type must be a string") - ), - Value::Boolean(true), - ); - - desc.scaffold_with_parameters(params)?; - - let mod_path = mod_path(); - let mut mod_rs = if mod_path.exists() { - std::fs::read_to_string(&mod_path)? 
- } else { - "".to_string() - }; - - let snake_name = name.to_snake_case(); - let re = Regex::new(&format!(r"(?m)^mod {snake_name};$")).unwrap(); - if re.find(&mod_rs).is_none() { - mod_rs = format!("mod {snake_name};\n{mod_rs}"); - } - - std::fs::write(mod_path, mod_rs)?; - - println!( - "Plugin created at '{}'.\nRemember to add the plugin to your router.yaml to activate it.", - plugin_path.display() - ); - Ok(()) -} - -fn get_router_version(cargo_toml: Value) -> String { - match cargo_toml - .get("dependencies") - .cloned() - .unwrap_or_else(|| Value::Table(toml::value::Table::default())) - .get("apollo-router") - { - Some(Value::String(version)) => format!("v{version}"), - Some(Value::Table(table)) => { - if let Some(Value::String(branch)) = table.get("branch") { - format!("origin/{}", branch.clone()) - } else if let Some(Value::String(tag)) = table.get("tag") { - tag.clone() - } else if let Some(Value::String(rev)) = table.get("rev") { - rev.clone() - } else { - format!("v{}", std::env!("CARGO_PKG_VERSION")) - } - } - _ => format!("v{}", std::env!("CARGO_PKG_VERSION")), - } -} - -fn remove_plugin(name: &str) -> Result<()> { - let plugin_path = plugin_path(name); - let snake_name = name.to_snake_case(); - - std::fs::remove_file(&plugin_path)?; - - // Remove the mod; - let mod_path = mod_path(); - if Path::new(&mod_path).exists() { - let mut mod_rs = std::fs::read_to_string(&mod_path)?; - let re = Regex::new(&format!(r"(?m)^mod {snake_name};$")).unwrap(); - mod_rs = re.replace(&mod_rs, "").to_string(); - - std::fs::write(mod_path, mod_rs)?; - } - - println!( - "Plugin removed at '{}'. 
This is a best effort, and you may need to edit some files manually.", - plugin_path.display() - ); - Ok(()) -} - -fn mod_path() -> PathBuf { - PathBuf::from("src").join("plugins").join("mod.rs") -} - -fn plugin_path(name: &str) -> PathBuf { - PathBuf::from("src") - .join("plugins") - .join(format!("{}.rs", name.to_snake_case())) -} diff --git a/apollo-router-scaffold/templates/base/.cargo/config b/apollo-router-scaffold/templates/base/.cargo/config deleted file mode 100644 index 24a9882b48..0000000000 --- a/apollo-router-scaffold/templates/base/.cargo/config +++ /dev/null @@ -1,3 +0,0 @@ -[alias] -xtask = "run --package xtask --" -router = "run --package xtask -- router" diff --git a/apollo-router-scaffold/templates/base/.dockerignore b/apollo-router-scaffold/templates/base/.dockerignore deleted file mode 100644 index c2c4a5aa95..0000000000 --- a/apollo-router-scaffold/templates/base/.dockerignore +++ /dev/null @@ -1 +0,0 @@ -target/** \ No newline at end of file diff --git a/apollo-router-scaffold/templates/base/.gitignore b/apollo-router-scaffold/templates/base/.gitignore deleted file mode 100644 index bba7b53950..0000000000 --- a/apollo-router-scaffold/templates/base/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -/target/ -/.idea/ diff --git a/apollo-router-scaffold/templates/base/.scaffold.toml b/apollo-router-scaffold/templates/base/.scaffold.toml deleted file mode 100644 index 02fd1e20d9..0000000000 --- a/apollo-router-scaffold/templates/base/.scaffold.toml +++ /dev/null @@ -1,32 +0,0 @@ -[template] -name = "apollo-router" -author = "Apollo" -version = "0.1.0" - -exclude = [ - "./target" -] - -disable_templating = [ - "./scaffold/**/*" -] - -notes = """ -Created new Apollo Router project '{{name}}'. - -> Note: The Apollo Router is made available under the Elastic License v2.0 (ELv2). -> Read [our licensing page](https://www.apollographql.com/docs/resources/elastic-license-v2-faq/) for more details. 
-""" - -[hooks] -post = [ - "mv Cargo.template.toml Cargo.toml", - "mv xtask/Cargo.template.toml xtask/Cargo.toml", -] - -[parameters] -[parameters.name] -type = "string" -message = "What is the name of your new router project?" -required = true - diff --git a/apollo-router-scaffold/templates/base/Cargo.template.toml b/apollo-router-scaffold/templates/base/Cargo.template.toml deleted file mode 100644 index 6a203b1a17..0000000000 --- a/apollo-router-scaffold/templates/base/Cargo.template.toml +++ /dev/null @@ -1,39 +0,0 @@ -# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html -[package] -name = "{{name}}" -version = "0.1.0" -edition = "2021" - -[workspace] -members = [ - "xtask", -] - -[[bin]] -name = "router" -path = "src/main.rs" - -[dependencies] -anyhow = "1.0.58" -{{#if integration_test}} -apollo-router = { path ="{{integration_test}}apollo-router" } -{{else}} -{{#if branch}} -apollo-router = { git="https://github.com/apollographql/router.git", branch="{{branch}}" } -{{else}} -# Note if you update these dependencies then also update xtask/Cargo.toml -apollo-router = "1.59.1" -{{/if}} -{{/if}} -async-trait = "0.1.52" -schemars = "0.8.10" -serde = "1.0.149" -serde_json = "1.0.79" -tokio = { version = "1.17.0", features = ["full"] } -tower = { version = "0.4.12", features = ["full"] } -tracing = "0.1.37" - -# this makes build scripts and proc macros faster to compile -[profile.dev.build-override] -strip = "debuginfo" -incremental = false diff --git a/apollo-router-scaffold/templates/base/Dockerfile b/apollo-router-scaffold/templates/base/Dockerfile deleted file mode 100644 index 3d3263130e..0000000000 --- a/apollo-router-scaffold/templates/base/Dockerfile +++ /dev/null @@ -1,54 +0,0 @@ -# Use the rust build image from docker as our base -# renovate-automation: rustc version -FROM rust:1.83.0 as build - -# Set our working directory for the build -WORKDIR /usr/src/router - -# Update our build image and install required 
packages -RUN apt-get update -RUN apt-get -y install \ - npm \ - protobuf-compiler - -# Add rustfmt since build requires it -RUN rustup component add rustfmt - -# Copy the router source to our build environment -COPY . . - -# Build and install the custom binary -RUN cargo build --release - -# Make directories for config and schema -RUN mkdir -p /dist/config && \ - mkdir /dist/schema && \ - mv target/release/router /dist - -# Copy configuration for docker image -COPY router.yaml /dist/config.yaml - -FROM debian:bullseye-slim - -RUN apt-get update -RUN apt-get -y install \ - ca-certificates - -# Set labels for our image -LABEL org.opencontainers.image.authors="Apollo Graph, Inc. https://github.com/apollographql/router" -LABEL org.opencontainers.image.source="https://github.com/apollographql/router" - -# Copy in the required files from our build image -COPY --from=build --chown=root:root /dist /dist - -WORKDIR /dist - -ENV APOLLO_ROUTER_CONFIG_PATH="/dist/config.yaml" - -# Make sure we can run the router -RUN chmod 755 /dist/router - -USER router - -# Default executable is the router -ENTRYPOINT ["/dist/router"] diff --git a/apollo-router-scaffold/templates/base/README.md b/apollo-router-scaffold/templates/base/README.md deleted file mode 100644 index 98ec70eb24..0000000000 --- a/apollo-router-scaffold/templates/base/README.md +++ /dev/null @@ -1,120 +0,0 @@ -# Apollo Router project - -This generated project is set up to create a custom Apollo Router binary that may include plugins that you have written. - -> Note: The Apollo Router is made available under the Elastic License v2.0 (ELv2). -> Read [our licensing page](https://www.apollographql.com/docs/resources/elastic-license-v2-faq/) for more details. - -# Compile the router - -To create a debug build use the following command. -```bash -cargo build -``` -Your debug binary is now located in `target/debug/router` - -For production, you will want to create a release build. 
-```bash -cargo build --release -``` -Your release binary is now located in `target/release/router` - -# Run the Apollo Router - -1. Download the example schema - - ```bash - curl -sSL https://supergraph.demo.starstuff.dev/ > supergraph-schema.graphql - ``` - -2. Run the Apollo Router - - During development it is convenient to use `cargo run` to run the Apollo Router as it will - ```bash - cargo run -- --hot-reload --config router.yaml --supergraph supergraph-schema.graphql - ``` - -> If you are using managed federation you can set APOLLO_KEY and APOLLO_GRAPH_REF environment variables instead of specifying the supergraph as a file. - -# Create a plugin - -1. From within your project directory scaffold a new plugin - ```bash - cargo router plugin create hello_world - ``` -2. Select the type of plugin you want to scaffold: - ```bash - Select a plugin template: - > "basic" - "auth" - "tracing" - ``` - - The different templates are: - * basic - a barebones plugin. - * auth - a basic authentication plugin that could make an external call. - * tracing - a plugin that adds a custom span and a log message. - - Choose `basic`. - -4. Add the plugin to the `router.yaml` - ```yaml - plugins: - starstuff.hello_world: - message: "Starting my plugin" - ``` - -5. Run the Apollo Router and see your plugin start up - ```bash - cargo run -- --hot-reload --config router.yaml --supergraph supergraph-schema.graphql - ``` - - In your output you should see something like: - ```bash - 2022-05-21T09:16:33.160288Z INFO router::plugins::hello_world: Starting my plugin - ``` - -# Remove a plugin - -1. From within your project run the following command. It makes a best effort to remove the plugin, but your mileage may vary. - ```bash - cargo router plugin remove hello_world - ``` - -# Docker - -You can use the provided Dockerfile to build a release container. 
- -Make sure your router is configured to listen to `0.0.0.0` so you can query it from outside the container: - -```yml - supergraph: - listen: 0.0.0.0:4000 -``` - -Use your `APOLLO_KEY` and `APOLLO_GRAPH_REF` environment variables to run the router in managed federation. - - ```bash - docker build -t my_custom_router . - docker run -e APOLLO_KEY="your apollo key" -e APOLLO_GRAPH_REF="your apollo graph ref" -p 4000:4000 my_custom_router - ``` - -Otherwise add a `COPY` step to the Dockerfile, and edit the entrypoint: - -```Dockerfile -# Copy configuration for docker image -COPY router.yaml /dist/config.yaml -# Copy supergraph for docker image -COPY my_supergraph.graphql /dist/supergraph.graphql - -# [...] and change the entrypoint - -# Default executable is the router -ENTRYPOINT ["/dist/router", "-s", "/dist/supergraph.graphql"] -``` - -You can now build and run your custom router: - ```bash - docker build -t my_custom_router . - docker run -p 4000:4000 my_custom_router - ``` diff --git a/apollo-router-scaffold/templates/base/router.yaml b/apollo-router-scaffold/templates/base/router.yaml deleted file mode 100644 index 8411f80a8b..0000000000 --- a/apollo-router-scaffold/templates/base/router.yaml +++ /dev/null @@ -1,5 +0,0 @@ -# uncomment this section if you plan to use the Dockerfile -# supergraph: -# listen: 0.0.0.0:4000 -plugins: - # Add plugin configuration here diff --git a/apollo-router-scaffold/templates/base/rust-toolchain.toml b/apollo-router-scaffold/templates/base/rust-toolchain.toml deleted file mode 100644 index 8de91b2c20..0000000000 --- a/apollo-router-scaffold/templates/base/rust-toolchain.toml +++ /dev/null @@ -1,6 +0,0 @@ -# Note that the contents should be same as https://github.com/apollographql/router/blob/main/rust-toolchain.toml - -[toolchain] -# renovate-automation: rustc version -channel = "1.83.0" -components = ["rustfmt", "clippy"] diff --git a/apollo-router-scaffold/templates/base/src/main.rs 
b/apollo-router-scaffold/templates/base/src/main.rs deleted file mode 100644 index ca6699afe9..0000000000 --- a/apollo-router-scaffold/templates/base/src/main.rs +++ /dev/null @@ -1,7 +0,0 @@ -mod plugins; - -use anyhow::Result; - -fn main() -> Result<()> { - apollo_router::main() -} diff --git a/apollo-router-scaffold/templates/base/src/plugins/mod.rs b/apollo-router-scaffold/templates/base/src/plugins/mod.rs deleted file mode 100644 index 8b13789179..0000000000 --- a/apollo-router-scaffold/templates/base/src/plugins/mod.rs +++ /dev/null @@ -1 +0,0 @@ - diff --git a/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml b/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml deleted file mode 100644 index e54f595298..0000000000 --- a/apollo-router-scaffold/templates/base/xtask/Cargo.template.toml +++ /dev/null @@ -1,20 +0,0 @@ -[package] -name = "xtask" -edition = "2021" -publish = false -version = "0.1.0" - -[dependencies] -# This dependency should stay in line with your router version - -{{#if integration_test}} -apollo-router-scaffold = { path ="{{integration_test}}apollo-router-scaffold" } -{{else}} -{{#if branch}} -apollo-router-scaffold = { git="https://github.com/apollographql/router.git", branch="{{branch}}" } -{{else}} -apollo-router-scaffold = { git = "https://github.com/apollographql/router.git", tag = "v1.59.1" } -{{/if}} -{{/if}} -anyhow = "1.0.58" -clap = "4.0.32" diff --git a/apollo-router-scaffold/templates/base/xtask/src/main.rs b/apollo-router-scaffold/templates/base/xtask/src/main.rs deleted file mode 100644 index 211ded5255..0000000000 --- a/apollo-router-scaffold/templates/base/xtask/src/main.rs +++ /dev/null @@ -1,38 +0,0 @@ -use anyhow::Result; -use apollo_router_scaffold::RouterAction; -use clap::Parser; -use clap::Subcommand; - -#[derive(Parser, Debug)] -struct Args { - #[clap(subcommand)] - action: Action, -} - -impl Args { - fn execute(&self) -> Result<()> { - self.action.execute() - } -} - -#[derive(Subcommand, 
Debug)] -enum Action { - /// Forward to router action - Router { - #[clap(subcommand)] - action: RouterAction, - }, -} - -impl Action { - fn execute(&self) -> Result<()> { - match self { - Action::Router { action } => action.execute(), - } - } -} - -fn main() -> Result<()> { - let args = Args::parse(); - args.execute() -} diff --git a/apollo-router-scaffold/templates/plugin/.scaffold.toml b/apollo-router-scaffold/templates/plugin/.scaffold.toml deleted file mode 100644 index 09a3e558d6..0000000000 --- a/apollo-router-scaffold/templates/plugin/.scaffold.toml +++ /dev/null @@ -1,25 +0,0 @@ -[template] -name = "apollo-router-plugins" -author = "Apollo" -version = "0.1.0" - -exclude = [ - "./target" -] - -notes = """ -Created new plugin {{project_name}}.{{snake_name}} -Source: src/plugins/{{snake_name}}.rs. - -To use the plugin add it to router.yaml: - -plugins: - {{project_name}}.{{snake_name}}: - # Plugin configuration -""" - -[parameters] -[parameters.type] -type = "select" -message = "Select a plugin template" -values = ["basic", "auth", "tracing"] diff --git a/apollo-router-scaffold/templates/plugin/src/plugins/mod.rs b/apollo-router-scaffold/templates/plugin/src/plugins/mod.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/apollo-router-scaffold/templates/plugin/src/plugins/{{snake_name}}.rs b/apollo-router-scaffold/templates/plugin/src/plugins/{{snake_name}}.rs deleted file mode 100644 index 2208f6eb3f..0000000000 --- a/apollo-router-scaffold/templates/plugin/src/plugins/{{snake_name}}.rs +++ /dev/null @@ -1,199 +0,0 @@ -{{#if type_auth}} -use std::ops::ControlFlow; - -use apollo_router::layers::ServiceBuilderExt; -{{/if}} -{{#if type_tracing}} -use apollo_router::layers::ServiceBuilderExt; -{{/if}} -use apollo_router::plugin::Plugin; -use apollo_router::plugin::PluginInit; -use apollo_router::register_plugin; -{{#if type_basic}} -use apollo_router::services::execution; -{{/if}} -{{#if type_basic}} -use apollo_router::services::router; -use 
apollo_router::services::subgraph; -{{/if}} -use apollo_router::services::supergraph; -use schemars::JsonSchema; -use serde::Deserialize; -use tower::BoxError; -{{#if type_auth}} -use tower::ServiceBuilder; -use tower::ServiceExt; -{{/if}} -{{#if type_tracing}} -use tower::ServiceBuilder; -use tower::ServiceExt; -{{/if}} - -#[derive(Debug)] -struct {{pascal_name}} { - #[allow(dead_code)] - configuration: Conf, -} - -#[derive(Debug, Default, Deserialize, JsonSchema)] -struct Conf { - // Put your plugin configuration here. It will automatically be deserialized from JSON. - // Always put some sort of config here, even if it is just a bool to say that the plugin is enabled, - // otherwise the yaml to enable the plugin will be confusing. - message: String, -} -{{#if type_basic}} -// This is a bare bones plugin that can be duplicated when creating your own. -#[async_trait::async_trait] -impl Plugin for {{pascal_name}} { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok({{pascal_name}} { - configuration: init.config, - }) - } - - // Delete this function if you are not customizing it. - fn router_service(&self, service: router::BoxService) -> router::BoxService { - // Always use service builder to compose your plugins. - // It provides off the shelf building blocks for your plugin. - // - // ServiceBuilder::new() - // .service(service) - // .boxed() - - // Returning the original service means that we didn't add any extra functionality at this point in the lifecycle. - service - } - - // Delete this function if you are not customizing it. - fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - // Always use service builder to compose your plugins. - // It provides off the shelf building blocks for your plugin. 
- // - // ServiceBuilder::new() - // .service(service) - // .boxed() - - // Returning the original service means that we didn't add any extra functionality for at this point in the lifecycle. - service - } - - // Delete this function if you are not customizing it. - fn execution_service(&self, service: execution::BoxService) -> execution::BoxService { - service - } - - // Delete this function if you are not customizing it. - fn subgraph_service(&self, _name: &str, service: subgraph::BoxService) -> subgraph::BoxService { - service - } -} -{{/if}} -{{#if type_auth}} -// This plugin is a skeleton for doing authentication that requires a remote call. -#[async_trait::async_trait] -impl Plugin for {{pascal_name}} { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok({{pascal_name}} { - configuration: init.config, - }) - } - - fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - ServiceBuilder::new() - .oneshot_checkpoint_async(|request: supergraph::Request| async { - // Do some async call here to auth, and decide if to continue or not. - Ok(ControlFlow::Continue(request)) - }) - .service(service) - .boxed() - } -} -{{/if}} -{{#if type_tracing}} -// This plugin adds a span and an error to the logs. 
-#[async_trait::async_trait] -impl Plugin for {{pascal_name}} { - type Config = Conf; - - async fn new(init: PluginInit) -> Result { - tracing::info!("{}", init.config.message); - Ok({{pascal_name}} { - configuration: init.config, - }) - } - - fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { - ServiceBuilder::new() - .instrument(|_request| { - // Optionally take information from the request and insert it into the span as attributes - // See https://docs.rs/tracing/latest/tracing/ for more information - tracing::info_span!("my_custom_span") - }) - .map_request(|request| { - // Add a log message, this will appear within the context of the current span - tracing::error!("error detected"); - request - }) - .service(service) - .boxed() - } -} -{{/if}} - -// This macro allows us to use it in our plugin registry! -// register_plugin takes a group name, and a plugin name. -register_plugin!("{{project_name}}", "{{snake_name}}", {{pascal_name}}); - -#[cfg(test)] -mod tests { - use apollo_router::graphql; - use apollo_router::services::supergraph; - use apollo_router::TestHarness; - use tower::BoxError; - use tower::ServiceExt; - - #[tokio::test] - async fn basic_test() -> Result<(), BoxError> { - let test_harness = TestHarness::builder() - .configuration_json(serde_json::json!({ - "plugins": { - "{{project_name}}.{{snake_name}}": { - "message" : "Starting my plugin" - } - } - })) - .unwrap() - .build_router() - .await - .unwrap(); - let request = supergraph::Request::canned_builder().build().unwrap(); - let mut streamed_response = test_harness.oneshot(request.try_into()?).await?; - - let first_response: graphql::Response = serde_json::from_slice( - streamed_response - .next_response() - .await - .expect("couldn't get primary response")? 
- .to_vec() - .as_slice(), - ) - .unwrap(); - - assert!(first_response.data.is_some()); - - println!("first response: {:?}", first_response); - let next = streamed_response.next_response().await; - println!("next response: {:?}", next); - - // You could keep calling .next_response() until it yields None if you're expexting more parts. - assert!(next.is_none()); - Ok(()) - } -} diff --git a/apollo-router/Cargo.toml b/apollo-router/Cargo.toml index 5563e7d699..d3c2861e91 100644 --- a/apollo-router/Cargo.toml +++ b/apollo-router/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "apollo-router" -version = "1.59.1" +version = "2.0.0-preview.4" authors = ["Apollo Graph, Inc. "] repository = "https://github.com/apollographql/router/" documentation = "https://docs.rs/apollo-router" @@ -48,19 +48,21 @@ failfast = [] # and not yet ready for production use. telemetry_next = [] -# Allow Router to use feature from custom fork of Hyper until it is merged: -# https://github.com/hyperium/hyper/pull/3523 -hyper_header_limits = [] - # is set when ci builds take place. It allows us to disable some tests when CI is running on certain platforms. 
ci = [] +# Enables the HTTP snapshot server for testing +snapshot = ["axum-server"] + +[package.metadata.docs.rs] +features = ["docs_rs"] + [dependencies] +arc-swap = "1.6.0" access-json = "0.1.0" anyhow = "1.0.86" apollo-compiler.workspace = true -apollo-federation = { path = "../apollo-federation", version = "=1.59.1" } -arc-swap = "1.6.0" +apollo-federation = { path = "../apollo-federation", version = "=2.0.0-preview.4" } async-channel = "1.9.0" async-compression = { version = "0.4.6", features = [ "tokio", @@ -69,7 +71,9 @@ async-compression = { version = "0.4.6", features = [ "deflate", ] } async-trait.workspace = true -axum = { version = "0.6.20", features = ["headers", "json", "original-uri"] } +axum = { version = "0.8.1", features = ["http2"] } +axum-extra = { version = "0.10.0", features = [ "typed-header" ] } +axum-server = { version = "0.7.1", optional = true } base64 = "0.22.0" bloomfilter = "1.0.13" buildstructor = "0.5.4" @@ -93,17 +97,20 @@ dhat = { version = "0.3.3", optional = true } diff = "0.1.13" displaydoc = "0.2" flate2 = "1.0.30" -fred = { version = "7.1.2", features = ["enable-rustls"] } +fred = { version = "9.4.0", features = ["enable-rustls", "i-cluster"] } futures = { version = "0.3.30", features = ["thread-pool"] } graphql_client = "0.14.0" hex.workspace = true http.workspace = true -http-body = "0.4.6" +http-0_2 = { version = "0.2.12", package = "http" } +http-body = "1.0.1" +http-body-util = {version = "0.1.2" } heck = "0.5.0" humantime = "2.1.0" humantime-serde = "1.1.1" -hyper = { version = "0.14.31", features = ["server", "client", "stream"] } -hyper-rustls = { version = "0.24.2", features = ["http1", "http2"] } +hyper = { version = "1.5.1", features = ["full"] } +hyper-util = { version = "0.1.10", features = ["full"] } +hyper-rustls = { version = "0.27.3", features = ["http1", "http2", "rustls-native-certs"] } indexmap = { version = "2.2.6", features = ["serde"] } itertools = "0.13.0" jsonpath_lib = "0.3.0" @@ -117,7 +124,7 @@ 
maplit = "1.0.2" mediatype = "0.19.18" mockall = "0.13.0" mime = "0.3.17" -multer = "2.1.0" +multer = "3.1.0" multimap = "0.9.1" # Warning: part of the public API # To avoid tokio issues notify = { version = "6.1.1", default-features = false, features = [ @@ -180,11 +187,25 @@ proteus = "0.5.0" rand = "0.8.5" rhai = { version = "1.19.0", features = ["sync", "serde", "internals"] } regex = "1.10.5" -reqwest.workspace = true +reqwest = { version = "0.12.9", default-features = false, features = [ + "rustls-tls", + "rustls-tls-native-roots", + "gzip", + "json", + "stream", +] } + +reqwest-0_11 = { version = "0.11.27", default-features = false, features = [ + "rustls-tls", + "rustls-tls-native-roots", + "gzip", + "json", + "stream", +], package="reqwest" } rust-embed = { version = "8.4.0", features = ["include-exclude"] } -rustls = "0.21.12" -rustls-native-certs = "0.6.3" -rustls-pemfile = "1.0.4" +rustls = "0.23.19" +rustls-native-certs = "0.8.1" +rustls-pemfile = "2.2.0" schemars.workspace = true shellexpand = "3.1.0" sha2 = "0.10.8" @@ -203,25 +224,21 @@ thiserror = "1.0.61" tokio.workspace = true tokio-stream = { version = "0.1.15", features = ["sync", "net"] } tokio-util = { version = "0.7.11", features = ["net", "codec", "time"] } -tonic = { version = "0.9.2", features = [ +tonic = { version = "0.12.3", features = [ "transport", "tls", "tls-roots", "gzip", ] } + +tonic-0_9 = { version = "0.9.0", features = [ + "transport", + "tls", + "tls-roots", + "gzip", +], package = "tonic" } tower.workspace = true -tower-http = { version = "0.4.0", features = [ - "add-extension", - "trace", - "cors", - "compression-br", - "compression-deflate", - "compression-gzip", - "decompression-br", - "decompression-deflate", - "decompression-gzip", - "timeout", -] } +tower-http = { version = "0.6.2", features = ["full"] } tower-service = "0.3.2" tracing = "0.1.40" tracing-core = "0.1.32" @@ -233,12 +250,13 @@ uuid = { version = "1.9.1", features = ["serde", "v4"] } yaml-rust = "0.4.5" 
wiremock = "0.5.22" wsl = "0.1.0" -tokio-tungstenite = { version = "0.20.1", features = [ +tokio-tungstenite = { version = "0.26.1", features = [ "rustls-tls-native-roots", ] } -tokio-rustls = "0.24.1" +tokio-rustls = "0.26.0" hickory-resolver = "0.24.1" -http-serde = "1.1.3" +http-serde = "2.1.1" +http-serde-1_1 = { version="1.1.3", package = "http-serde" } hmac = "0.12.1" parking_lot = { version = "0.12.3", features = ["serde"] } memchr = "2.7.4" @@ -246,11 +264,12 @@ brotli = "3.5.0" zstd = "0.13.1" zstd-safe = "7.1.0" # note: AWS dependencies should always use the same version -aws-sigv4 = "1.1.6" -aws-credential-types = "1.1.6" -aws-config = "1.1.6" -aws-types = "1.1.6" -aws-smithy-runtime-api = { version = "1.1.6", features = ["client"] } +# note: hyper 1.0 update seems to mean this isn't true... +aws-sigv4 = "1.2.6" +aws-credential-types = "1.2.1" # XXX: This is the latest version +aws-config = "1.5.5" +aws-types = "1.3.3" +aws-smithy-runtime-api = { version = "1.7.3", features = ["client"] } aws-sdk-sso = "=1.39.0" # TODO: unpin when on Rust 1.78+ aws-sdk-ssooidc = "=1.40.0" # TODO: unpin when on Rust 1.78+ aws-sdk-sts = "=1.39.0" # TODO: unpin when on Rust 1.78+ @@ -263,6 +282,7 @@ bytesize = { version = "1.3.0", features = ["serde"] } ahash = "0.8.11" itoa = "1.0.9" ryu = "1.0.15" +form_urlencoded = "1.2.1" apollo-environment-detector = "0.1.0" [target.'cfg(macos)'.dependencies] @@ -270,7 +290,7 @@ uname = "0.1.1" [target.'cfg(unix)'.dependencies] uname = "0.1.1" -hyperlocal = { version = "0.8.0", default-features = false, features = [ +hyperlocal = { version = "0.9.1", default-features = false, features = [ "client", ] } @@ -278,14 +298,10 @@ hyperlocal = { version = "0.8.0", default-features = false, features = [ tikv-jemallocator = "0.6.0" [dev-dependencies] -axum = { version = "0.6.20", features = [ - "headers", - "json", - "original-uri", - "ws", -] } +axum = { version = "0.8.1", features = ["http2", "ws"] } +axum-server = "0.7.1" ecdsa = { version = 
"0.16.9", features = ["signing", "pem", "pkcs8"] } -fred = { version = "7.1.2", features = ["enable-rustls", "mocks"] } +fred = { version = "9.4.0", features = ["enable-rustls", "mocks", "i-cluster"] } futures-test = "0.3.30" insta.workspace = true maplit = "1.0.2" @@ -303,8 +319,9 @@ opentelemetry-proto = { version = "0.5.0", features = [ ] } opentelemetry-datadog = { version = "0.8.0", features = ["reqwest-client"] } p256 = "0.13.2" +pretty_assertions = "1.4.0" rand_core = "0.6.4" -reqwest = { version = "0.11.0", default-features = false, features = [ +reqwest = { version = "0.12.9", default-features = false, features = [ "json", "multipart", "stream", @@ -334,18 +351,19 @@ tracing-test = "0.2.5" walkdir = "2.5.0" wiremock = "0.5.22" libtest-mimic = "0.7.3" +rstest = "0.22.0" [target.'cfg(target_os = "linux")'.dev-dependencies] rstack = { version = "0.3.3", features = ["dw"], default-features = false } [target.'cfg(unix)'.dev-dependencies] -hyperlocal = { version = "0.8.0", default-features = false, features = [ +hyperlocal = { version = "0.9.1", default-features = false, features = [ "client", "server", ] } [build-dependencies] -tonic-build = "0.9.2" +tonic-build = "0.12.3" basic-toml = "0.1.9" serde_json.workspace = true @@ -367,6 +385,12 @@ name = "samples" path = "tests/samples_tests.rs" harness = false +[[bin]] +name = "snapshot" +path = "src/test_harness/http_snapshot_main.rs" +test = false +required-features = ["snapshot"] + [[bench]] name = "huge_requests" harness = false diff --git a/apollo-router/benches/deeply_nested.rs b/apollo-router/benches/deeply_nested.rs index c99f2ce99d..5d05681b78 100644 --- a/apollo-router/benches/deeply_nested.rs +++ b/apollo-router/benches/deeply_nested.rs @@ -6,8 +6,12 @@ #![allow(clippy::single_char_add_str)] // don’t care use std::fmt::Write; +use std::time::Duration; -use futures::stream::StreamExt; +use apollo_router::services::router::body::RouterBody; +use http_body_util::BodyExt; +use http_body_util::Full; +use 
hyper_util::rt::TokioExecutor; use serde_json_bytes::Value; use tokio::io::AsyncBufReadExt; use tokio::process::Command; @@ -48,7 +52,7 @@ async fn main() { }}; } - let _subgraph = spawn_subgraph(); + let _subgraph = spawn_subgraph().await; let graphql_recursion_limit = 5_000; let _router = spawn_router(graphql_recursion_limit).await; @@ -133,14 +137,16 @@ async fn graphql_client(nesting_level: usize) -> Result { let request = http::Request::post(format!("http://127.0.0.1:{SUPERGRAPH_PORT}")) .header("content-type", "application/json") .header("fibonacci-iterations", nesting_level) - .body(json.into()) + .body(json) .unwrap(); - let client = hyper::Client::new(); + let client = hyper_util::client::legacy::Client::builder(TokioExecutor::new()).build_http(); let mut response = client.request(request).await.map_err(|e| e.to_string())?; - let body = hyper::body::to_bytes(response.body_mut()) + let body = response + .body_mut() + .collect() .await .map_err(|e| e.to_string())?; - let json = serde_json::from_slice::(&body).map_err(|e| e.to_string())?; + let json = serde_json::from_slice::(&body.to_bytes()).map_err(|e| e.to_string())?; if let Some(errors) = json.get("errors") { if !errors.is_null() { return Err(errors.to_string()); @@ -149,29 +155,56 @@ async fn graphql_client(nesting_level: usize) -> Result { Ok(json.get("data").cloned().unwrap_or(Value::Null)) } -fn spawn_subgraph() -> ShutdownOnDrop { - let (tx, rx) = tokio::sync::oneshot::channel::<()>(); +async fn spawn_subgraph() -> ShutdownOnDrop { + let (tx, mut rx) = tokio::sync::mpsc::channel::<()>(2); let shutdown_on_drop = ShutdownOnDrop(Some(tx)); - let service = hyper::service::make_service_fn(|_| async { - Ok::<_, hyper::Error>(hyper::service::service_fn(subgraph)) - }); - let server = hyper::Server::bind(&([127, 0, 0, 1], SUBGRAPH_PORT).into()) - .serve(service) - .with_graceful_shutdown(async { - let _ = rx.await; - }); + let listener = tokio::net::TcpListener::bind(("127.0.0.1", SUBGRAPH_PORT)) + .await + 
.unwrap(); + let server = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()); + let graceful = hyper_util::server::graceful::GracefulShutdown::new(); + tokio::spawn(async move { - if let Err(e) = server.await { - eprintln!("server error: {}", e); + loop { + tokio::select! { + conn = listener.accept() => { + let (stream, peer_addr) = conn.unwrap(); + let stream = hyper_util::rt::TokioIo::new(Box::pin(stream)); + let conn = server + .serve_connection_with_upgrades(stream, hyper::service::service_fn(subgraph)); + let conn = graceful.watch(conn.into_owned()); + + tokio::spawn(async move { + if let Err(err) = conn.await { + eprintln!("connection error: {}", err); + } + eprintln!("connection dropped: {}", peer_addr); + }); + } + _ = rx.recv() => { + drop(listener); + break; + } + } + } + + tokio::select! { + _ = graceful.shutdown() => { + eprintln!("Gracefully shutdown!"); + }, + _ = tokio::time::sleep(Duration::from_secs(5)) => { + eprintln!("Waited 10 seconds for graceful shutdown, aborting..."); + } } }); + shutdown_on_drop } async fn subgraph( - request: http::Request, -) -> Result, hyper::Error> { + request: http::Request, +) -> Result, hyper::Error> { let nesting_level = request .headers() .get("fibonacci-iterations") @@ -183,11 +216,12 @@ async fn subgraph( // Read the request body and prompty ignore it request .into_body() - .for_each(|chunk| { - let _: &[u8] = &chunk.unwrap(); - async {} - }) - .await; + .collect() + .await + .unwrap() + .to_bytes() + .into_iter() + .for_each(|_chunk| {}); // Assume we got a GraphQL request with that many nested selection sets let mut json = String::from(r#"{"data":{"value":0"#); let mut a = 1; @@ -203,7 +237,11 @@ async fn subgraph( json.push_str("}"); } json.push_str("}}"); - let mut response = http::Response::new(hyper::Body::from(json)); + let mut response = http::Response::new( + Full::new(json.into()) + .map_err(|never| match never {}) + .boxed_unsync(), + ); let application_json = 
hyper::header::HeaderValue::from_static("application/json"); response .headers_mut() @@ -211,12 +249,12 @@ async fn subgraph( Ok(response) } -struct ShutdownOnDrop(Option>); +struct ShutdownOnDrop(Option>); impl Drop for ShutdownOnDrop { fn drop(&mut self) { if let Some(tx) = self.0.take() { - let _ = tx.send(()); + drop(tx.send(())); } } } diff --git a/apollo-router/benches/huge_requests.rs b/apollo-router/benches/huge_requests.rs index 0a5a4b23c0..0cd284d156 100644 --- a/apollo-router/benches/huge_requests.rs +++ b/apollo-router/benches/huge_requests.rs @@ -1,6 +1,9 @@ use std::time::Duration; -use futures::stream::StreamExt; +use apollo_router::services::router::body::RouterBody; +use http_body_util::BodyExt; +use http_body_util::Full; +use hyper_util::rt::TokioExecutor; use tokio::io::AsyncBufReadExt; use tokio::process::Command; @@ -119,14 +122,15 @@ async fn graphql_client(string_variable_bytes: usize) -> Duration { }); let request = http::Request::post(format!("http://127.0.0.1:{SUPERGRAPH_PORT}")) .header("content-type", "application/json") - .body(serde_json::to_string(&graphql_request).unwrap().into()) + .body(serde_json::to_string(&graphql_request).unwrap()) .unwrap(); - let client = hyper::Client::new(); + let client = hyper_util::client::legacy::Client::builder(TokioExecutor::new()).build_http(); let start_time = std::time::Instant::now(); let result = client.request(request).await; let latency = start_time.elapsed(); let mut response = result.unwrap(); - let body = hyper::body::to_bytes(response.body_mut()).await.unwrap(); + let body = response.body_mut().collect().await.unwrap(); + let body = body.to_bytes(); assert_eq!( String::from_utf8_lossy(&body), r#"{"data":{"upload":true}}"# @@ -139,47 +143,78 @@ async fn graphql_client(string_variable_bytes: usize) -> Duration { } async fn spawn_subgraph() -> ShutdownOnDrop { - let (tx, rx) = tokio::sync::oneshot::channel::<()>(); + let (tx, mut rx) = tokio::sync::mpsc::channel::<()>(2); let shutdown_on_drop 
= ShutdownOnDrop(Some(tx)); - let service = hyper::service::make_service_fn(|_| async { - Ok::<_, hyper::Error>(hyper::service::service_fn(subgraph)) - }); - let server = hyper::Server::bind(&([127, 0, 0, 1], SUBGRAPH_PORT).into()) - .serve(service) - .with_graceful_shutdown(async { - let _ = rx.await; - }); + let listener = tokio::net::TcpListener::bind(("127.0.0.1", SUBGRAPH_PORT)) + .await + .unwrap(); + let server = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()); + let graceful = hyper_util::server::graceful::GracefulShutdown::new(); + tokio::spawn(async move { - if let Err(e) = server.await { - eprintln!("server error: {}", e); + loop { + tokio::select! { + conn = listener.accept() => { + let (stream, peer_addr) = conn.unwrap(); + let stream = hyper_util::rt::TokioIo::new(Box::pin(stream)); + let conn = server + .serve_connection_with_upgrades(stream, hyper::service::service_fn(subgraph)); + let conn = graceful.watch(conn.into_owned()); + + tokio::spawn(async move { + if let Err(err) = conn.await { + eprintln!("connection error: {}", err); + } + eprintln!("connection dropped: {}", peer_addr); + }); + } + _ = rx.recv() => { + drop(listener); + break; + } + } + } + + tokio::select! { + _ = graceful.shutdown() => { + eprintln!("Gracefully shutdown!"); + }, + _ = tokio::time::sleep(Duration::from_secs(5)) => { + eprintln!("Waited 10 seconds for graceful shutdown, aborting..."); + } } }); + shutdown_on_drop } async fn subgraph( - request: http::Request, -) -> Result, hyper::Error> { + request: http::Request, +) -> Result, hyper::Error> { // Read the request body and prompty ignore it request .into_body() - .for_each(|chunk| { - let _: &[u8] = &chunk.unwrap(); - async {} - }) - .await; + .collect() + .await? 
+ .to_bytes() + .iter() + .for_each(|_chunk| {}); // Assume we got a GraphQL request with `mutation Mutation { upload($some_string) }` let graphql_response = r#"{"data":{"upload":true}}"#; - Ok(http::Response::new(hyper::Body::from(graphql_response))) + Ok::<_, hyper::Error>(http::Response::new( + Full::new(graphql_response.as_bytes().into()) + .map_err(|never| match never {}) + .boxed_unsync(), + )) } -struct ShutdownOnDrop(Option>); +struct ShutdownOnDrop(Option>); impl Drop for ShutdownOnDrop { fn drop(&mut self) { if let Some(tx) = self.0.take() { - let _ = tx.send(()); + drop(tx.send(())); } } } diff --git a/apollo-router/build/studio.rs b/apollo-router/build/studio.rs index 2975f439a4..3d4431ef04 100644 --- a/apollo-router/build/studio.rs +++ b/apollo-router/build/studio.rs @@ -50,7 +50,7 @@ pub fn main() -> Result<(), Box> { .type_attribute(".", "#[derive(serde::Serialize)]") .type_attribute("StatsContext", "#[derive(Eq, Hash)]") .emit_rerun_if_changed(false) - .compile(&[reports_out], &[&out_dir])?; + .compile_protos(&[reports_out], &[&out_dir])?; Ok(()) } diff --git a/apollo-router/feature_discussions.json b/apollo-router/feature_discussions.json index 0c044e2219..f0dea9a7f4 100644 --- a/apollo-router/feature_discussions.json +++ b/apollo-router/feature_discussions.json @@ -1,9 +1,8 @@ { "experimental": { - "experimental_response_trace_id": "https://github.com/apollographql/router/discussions/2147", - "experimental_when_header": "https://github.com/apollographql/router/discussions/1961" + "experimental_response_trace_id": "https://github.com/apollographql/router/discussions/2147" }, "preview": { "preview_entity_cache": "https://github.com/apollographql/router/discussions/4592" } -} \ No newline at end of file +} diff --git a/apollo-router/src/axum_factory/axum_http_server_factory.rs b/apollo-router/src/axum_factory/axum_http_server_factory.rs index df6d9743d4..997abe353a 100644 --- a/apollo-router/src/axum_factory/axum_http_server_factory.rs +++ 
b/apollo-router/src/axum_factory/axum_http_server_factory.rs @@ -5,10 +5,8 @@ use std::sync::atomic::AtomicBool; use std::sync::atomic::AtomicU64; use std::sync::atomic::Ordering; use std::sync::Arc; -use std::time::Duration; use std::time::Instant; -use axum::error_handling::HandleErrorLayer; use axum::extract::Extension; use axum::extract::State; use axum::http::StatusCode; @@ -24,13 +22,12 @@ use http::header::ACCEPT_ENCODING; use http::header::CONTENT_ENCODING; use http::HeaderValue; use http::Request; -use http_body::combinators::UnsyncBoxBody; -use hyper::server::conn::Http; -use hyper::Body; use itertools::Itertools; use multimap::MultiMap; +use once_cell::sync::Lazy; use opentelemetry_api::metrics::MeterProvider as _; use opentelemetry_api::metrics::ObservableGauge; +use regex::Regex; use serde::Serialize; use serde_json::json; #[cfg(unix)] @@ -40,9 +37,7 @@ use tokio_rustls::TlsAcceptor; use tower::layer::layer_fn; use tower::service_fn; use tower::BoxError; -use tower::ServiceBuilder; use tower::ServiceExt; -use tower_http::decompression::DecompressionBody; use tower_http::trace::TraceLayer; use tracing::instrument::WithSubscriber; use tracing::Instrument; @@ -68,7 +63,6 @@ use crate::plugins::telemetry::SpanMode; use crate::router::ApolloRouterError; use crate::router_factory::Endpoint; use crate::router_factory::RouterFactory; -use crate::services::http::service::BodyStream; use crate::services::router; use crate::uplink::license_enforcement::LicenseState; use crate::uplink::license_enforcement::APOLLO_ROUTER_LICENSE_EXPIRED; @@ -76,6 +70,9 @@ use crate::uplink::license_enforcement::LICENSE_EXPIRED_SHORT_MESSAGE; use crate::Context; static ACTIVE_SESSION_COUNT: AtomicU64 = AtomicU64::new(0); +static BARE_WILDCARD_PATH_REGEX: Lazy = Lazy::new(|| { + Regex::new(r"^/\{\*[^/]+\}$").expect("this regex to check wildcard paths is valid") +}); fn session_count_instrument() -> ObservableGauge { let meter = meter_provider().meter("apollo/router"); @@ -195,8 
+192,10 @@ where tracing::trace!(?health, request = ?req.router_request, "health check"); async move { Ok(router::Response { - response: http::Response::builder().status(status_code).body::( - serde_json::to_vec(&health).map_err(BoxError::from)?.into(), + response: http::Response::builder().status(status_code).body( + router::body::from_bytes( + serde_json::to_vec(&health).map_err(BoxError::from)?, + ), )?, context: req.context, }) @@ -313,25 +312,12 @@ impl HttpServerFactory for AxumHttpServerFactory { let actual_main_listen_address = main_listener .local_addr() .map_err(ApolloRouterError::ServerCreationError)?; - let mut http_config = Http::new(); - http_config.http1_keep_alive(true); - http_config.http1_header_read_timeout(Duration::from_secs(10)); - - #[cfg(feature = "hyper_header_limits")] - if let Some(max_headers) = configuration.limits.http1_max_request_headers { - http_config.http1_max_headers(max_headers); - } - - if let Some(max_buf_size) = configuration.limits.http1_max_request_buf_size { - http_config.max_buf_size(max_buf_size.as_u64() as usize); - } let (main_server, main_shutdown_sender) = serve_router_on_listen_addr( main_listener, - actual_main_listen_address.clone(), all_routers.main.1, - true, - http_config.clone(), + configuration.limits.http1_max_request_headers, + configuration.limits.http1_max_request_buf_size, all_connections_stopped_sender.clone(), ); @@ -368,10 +354,9 @@ impl HttpServerFactory for AxumHttpServerFactory { .map(|((listen_addr, listener), router)| { let (server, shutdown_sender) = serve_router_on_listen_addr( listener, - listen_addr.clone(), router, - false, - http_config.clone(), + configuration.limits.http1_max_request_headers, + configuration.limits.http1_max_request_buf_size, all_connections_stopped_sender.clone(), ); ( @@ -458,10 +443,6 @@ pub(crate) fn span_mode(configuration: &Configuration) -> SpanMode { .unwrap_or_default() } -async fn decompression_error(_error: BoxError) -> axum::response::Response { - 
(StatusCode::BAD_REQUEST, "cannot decompress request body").into_response() -} - fn main_endpoint( service_factory: RF, configuration: &Configuration, @@ -476,14 +457,15 @@ where })?; let span_mode = span_mode(configuration); - let decompression = ServiceBuilder::new() - .layer(HandleErrorLayer::<_, ()>::new(decompression_error)) - .layer( - tower_http::decompression::RequestDecompressionLayer::new() - .br(true) - .gzip(true) - .deflate(true), - ); + // XXX(@goto-bus-stop): in hyper 0.x, we required a HandleErrorLayer around this, + // to turn errors from decompression into an axum error response. Now, + // `RequestDecompressionLayer` appears to preserve(?) the error type from the inner service? + // So maybe we don't need this anymore? But I don't understand what happens to an error *caused + // by decompression* (such as an invalid compressed data stream). + let decompression = tower_http::decompression::RequestDecompressionLayer::new() + .br(true) + .gzip(true) + .deflate(true); let mut main_route = main_router::(configuration) .layer(decompression) .layer(middleware::from_fn_with_state( @@ -518,7 +500,7 @@ where Ok(ListenAddrAndRouter(listener, route)) } -async fn metrics_handler(request: Request, next: Next) -> Response { +async fn metrics_handler(request: Request, next: Next) -> Response { let resp = next.run(request).await; u64_counter!( "apollo.router.operations", @@ -529,22 +511,15 @@ async fn metrics_handler(request: Request, next: Next) -> Response { resp } -async fn license_handler( +async fn license_handler( State((license, start, delta)): State<(LicenseState, Instant, Arc)>, - request: Request, - next: Next, + request: Request, + next: Next, ) -> Response { if matches!( license, LicenseState::LicensedHalt | LicenseState::LicensedWarn ) { - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - status = StatusCode::INTERNAL_SERVER_ERROR.as_u16() as i64, - error = LICENSE_EXPIRED_SHORT_MESSAGE - ); // This 
will rate limit logs about license to 1 a second. // The way it works is storing the delta in seconds from a starting instant. // If the delta is over one second from the last time we logged then try and do a compare_exchange and if successfull log. @@ -571,73 +546,36 @@ async fn license_handler( if matches!(license, LicenseState::LicensedHalt) { http::Response::builder() .status(StatusCode::INTERNAL_SERVER_ERROR) - .body(UnsyncBoxBody::default()) + .body(axum::body::Body::default()) .expect("canned response must be valid") } else { next.run(request).await } } -pub(super) fn main_router( - configuration: &Configuration, -) -> axum::Router<(), DecompressionBody> +#[derive(Clone)] +struct HandlerOptions { + early_cancel: bool, + experimental_log_on_broken_pipe: bool, +} + +pub(super) fn main_router(configuration: &Configuration) -> axum::Router<()> where RF: RouterFactory, { - let early_cancel = configuration.supergraph.early_cancel; - let experimental_log_on_broken_pipe = configuration.supergraph.experimental_log_on_broken_pipe; let mut router = Router::new().route( &configuration.supergraph.sanitized_path(), - get({ - move |Extension(service): Extension, request: Request>| { - handle_graphql( - service.create().boxed(), - early_cancel, - experimental_log_on_broken_pipe, - request, - ) - } - }) - .post({ - move |Extension(service): Extension, request: Request>| { - handle_graphql( - service.create().boxed(), - early_cancel, - experimental_log_on_broken_pipe, - request, - ) - } - }), + get(handle_graphql::).post(handle_graphql::), ); - if configuration.supergraph.path == "/*" { - router = router.route( - "/", - get({ - move |Extension(service): Extension, - request: Request>| { - handle_graphql( - service.create().boxed(), - early_cancel, - experimental_log_on_broken_pipe, - request, - ) - } - }) - .post({ - move |Extension(service): Extension, - request: Request>| { - handle_graphql( - service.create().boxed(), - early_cancel, - experimental_log_on_broken_pipe, - 
request, - ) - } - }), - ); + if BARE_WILDCARD_PATH_REGEX.is_match(configuration.supergraph.path.as_str()) { + router = router.route("/", get(handle_graphql::).post(handle_graphql::)); } + router = router.route_layer(Extension(HandlerOptions { + early_cancel: configuration.supergraph.early_cancel, + experimental_log_on_broken_pipe: configuration.supergraph.experimental_log_on_broken_pipe, + })); // Tie the lifetime of the session count instrument to the lifetime of the router // by moving it into a no-op layer. let instrument = session_count_instrument(); @@ -649,17 +587,18 @@ where router } -async fn handle_graphql( - service: router::BoxService, - early_cancel: bool, - experimental_log_on_broken_pipe: bool, - http_request: Request>, +async fn handle_graphql( + Extension(options): Extension, + Extension(service_factory): Extension, + http_request: Request, ) -> impl IntoResponse { let _guard = ActiveSessionCountGuard::start(); - let (parts, body) = http_request.into_parts(); - - let http_request = http::Request::from_parts(parts, Body::wrap_stream(BodyStream::new(body))); + let HandlerOptions { + early_cancel, + experimental_log_on_broken_pipe, + } = options; + let service = service_factory.create(); let request: router::Request = http_request.into(); let context = request.context.clone(); @@ -715,7 +654,7 @@ async fn handle_graphql( CONTENT_ENCODING, HeaderValue::from_static(compressor.content_encoding()), ); - Body::wrap_stream(compressor.process(body.into())) + router::body::from_result_stream(compressor.process(body)) } }; @@ -843,7 +782,9 @@ mod tests { .uri("/") .header(ACCEPT, "application/json") .header(CONTENT_TYPE, "application/json") - .body(hyper::Body::from(r#"{"query":"query { me { name }}"}"#)) + .body(router::body::from_bytes( + r#"{"query":"query { me { name }}"}"#, + )) .unwrap(), ), ) @@ -877,7 +818,9 @@ mod tests { .uri("/") .header(ACCEPT, "application/json") .header(CONTENT_TYPE, "application/json") - 
.body(hyper::Body::from(r#"{"query":"query { me { name }}"}"#)) + .body(router::body::from_bytes( + r#"{"query":"query { me { name }}"}"#, + )) .unwrap(), ), ) diff --git a/apollo-router/src/axum_factory/compression/mod.rs b/apollo-router/src/axum_factory/compression/mod.rs index feef4f5a0a..00c1cb0352 100644 --- a/apollo-router/src/axum_factory/compression/mod.rs +++ b/apollo-router/src/axum_factory/compression/mod.rs @@ -70,11 +70,12 @@ impl Compressor { pub(crate) fn process( mut self, - mut stream: RouterBody, + body: RouterBody, ) -> impl Stream> where { let (tx, rx) = mpsc::channel(10); + let mut stream = http_body_util::BodyDataStream::new(body); tokio::task::spawn( async move { while let Some(data) = stream.next().await { @@ -214,17 +215,20 @@ mod tests { use tokio::io::AsyncWriteExt; use super::*; + use crate::services::router; + use crate::services::router::body::{self}; #[tokio::test] async fn finish() { let compressor = Compressor::new(["gzip"].into_iter()).unwrap(); let mut rng = rand::thread_rng(); - let body: RouterBody = std::iter::repeat(()) - .map(|_| rng.gen_range(0u8..3)) - .take(5000) - .collect::>() - .into(); + let body: RouterBody = body::from_bytes( + std::iter::repeat(()) + .map(|_| rng.gen_range(0u8..3)) + .take(5000) + .collect::>(), + ); let mut stream = compressor.process(body); let mut decoder = GzipDecoder::new(Vec::new()); @@ -244,7 +248,7 @@ mod tests { async fn small_input() { let compressor = Compressor::new(["gzip"].into_iter()).unwrap(); - let body: RouterBody = vec![0u8, 1, 2, 3].into(); + let body: RouterBody = body::from_bytes(vec![0u8, 1, 2, 3]); let mut stream = compressor.process(body); let mut decoder = GzipDecoder::new(Vec::new()); @@ -264,7 +268,8 @@ mod tests { #[tokio::test] async fn gzip_header_writing() { let compressor = Compressor::new(["gzip"].into_iter()).unwrap(); - let body: RouterBody = r#"{"data":{"me":{"id":"1","name":"Ada Lovelace"}}}"#.into(); + let body: RouterBody = + 
body::from_bytes(r#"{"data":{"me":{"id":"1","name":"Ada Lovelace"}}}"#); let mut stream = compressor.process(body); let _ = stream.next().await.unwrap().unwrap(); @@ -279,15 +284,15 @@ content-type: application/json {"data":{"allProducts":[{"sku":"federation","id":"apollo-federation"},{"sku":"studio","id":"apollo-studio"},{"sku":"client","id":"apollo-client"}]},"hasNext":true} --graphql "#; + let deferred_response = r#"content-type: application/json {"hasNext":false,"incremental":[{"data":{"dimensions":{"size":"1"},"variation":{"id":"OSS","name":"platform"}},"path":["allProducts",0]},{"data":{"dimensions":{"size":"1"},"variation":{"id":"platform","name":"platform-name"}},"path":["allProducts",1]},{"data":{"dimensions":{"size":"1"},"variation":{"id":"OSS","name":"client"}},"path":["allProducts",2]}]} --graphql-- "#; - let compressor = Compressor::new(["gzip"].into_iter()).unwrap(); - let body: RouterBody = RouterBody::wrap_stream(stream::iter(vec![ + let body: RouterBody = router::body::from_result_stream(stream::iter(vec![ Ok::<_, BoxError>(Bytes::from(primary_response)), Ok(Bytes::from(deferred_response)), ])); diff --git a/apollo-router/src/axum_factory/listeners.rs b/apollo-router/src/axum_factory/listeners.rs index d691d84c85..8795822ac7 100644 --- a/apollo-router/src/axum_factory/listeners.rs +++ b/apollo-router/src/axum_factory/listeners.rs @@ -3,19 +3,20 @@ use std::collections::HashMap; use std::collections::HashSet; use std::sync::atomic::AtomicBool; -use std::sync::atomic::AtomicU64; use std::sync::atomic::Ordering; use std::sync::Arc; use std::time::Duration; use axum::response::*; use axum::Router; +use bytesize::ByteSize; use futures::channel::oneshot; use futures::prelude::*; -use hyper::server::conn::Http; +use hyper_util::rt::TokioExecutor; +use hyper_util::rt::TokioIo; +use hyper_util::rt::TokioTimer; +use hyper_util::server::conn::auto::Builder; use multimap::MultiMap; -use opentelemetry::metrics::MeterProvider; -use opentelemetry::KeyValue; 
#[cfg(unix)] use tokio::net::UnixListener; use tokio::sync::mpsc; @@ -28,29 +29,12 @@ use crate::axum_factory::ENDPOINT_CALLBACK; use crate::configuration::Configuration; use crate::http_server_factory::Listener; use crate::http_server_factory::NetworkStream; -use crate::metrics::meter_provider; use crate::router::ApolloRouterError; use crate::router_factory::Endpoint; use crate::ListenAddr; -static TOTAL_SESSION_COUNT: AtomicU64 = AtomicU64::new(0); static MAX_FILE_HANDLES_WARN: AtomicBool = AtomicBool::new(false); -struct TotalSessionCountGuard; - -impl TotalSessionCountGuard { - fn start() -> Self { - TOTAL_SESSION_COUNT.fetch_add(1, Ordering::Acquire); - Self - } -} - -impl Drop for TotalSessionCountGuard { - fn drop(&mut self) { - TOTAL_SESSION_COUNT.fetch_sub(1, Ordering::Acquire); - } -} - #[derive(Clone, Debug)] pub(crate) struct ListenAddrAndRouter(pub(crate) ListenAddr, pub(crate) Router); @@ -215,28 +199,79 @@ pub(super) async fn get_extra_listeners( Ok(listeners_and_routers) } +async fn process_error(io_error: std::io::Error) { + match io_error.kind() { + // this is already handled by mio and tokio + //std::io::ErrorKind::WouldBlock => todo!(), + + // should be treated as EAGAIN + // https://man7.org/linux/man-pages/man2/accept.2.html + // Linux accept() (and accept4()) passes already-pending network + // errors on the new socket as an error code from accept(). This + // behavior differs from other BSD socket implementations. For + // reliable operation the application should detect the network + // errors defined for the protocol after accept() and treat them + // like EAGAIN by retrying. In the case of TCP/IP, these are + // ENETDOWN, EPROTO, ENOPROTOOPT, EHOSTDOWN, ENONET, EHOSTUNREACH, + // EOPNOTSUPP, and ENETUNREACH. 
+ // + // those errors are not supported though: needs the unstable io_error_more feature + // std::io::ErrorKind::NetworkDown => todo!(), + // std::io::ErrorKind::HostUnreachable => todo!(), + // std::io::ErrorKind::NetworkUnreachable => todo!(), + + //ECONNABORTED + std::io::ErrorKind::ConnectionAborted| + //EINTR + std::io::ErrorKind::Interrupted| + // EINVAL + std::io::ErrorKind::InvalidInput| + std::io::ErrorKind::PermissionDenied | + std::io::ErrorKind::TimedOut | + std::io::ErrorKind::ConnectionReset| + std::io::ErrorKind::NotConnected => { + // the socket was invalid (maybe timedout waiting in accept queue, or was closed) + // we should ignore that and get to the next one + } + + // ignored errors, these should not happen with accept() + std::io::ErrorKind::NotFound | + std::io::ErrorKind::AddrInUse | + std::io::ErrorKind::AddrNotAvailable | + std::io::ErrorKind::BrokenPipe| + std::io::ErrorKind::AlreadyExists | + std::io::ErrorKind::InvalidData | + std::io::ErrorKind::WriteZero | + std::io::ErrorKind::Unsupported | + std::io::ErrorKind::UnexpectedEof | + std::io::ErrorKind::OutOfMemory => { + } + + // EPROTO, EOPNOTSUPP, EBADF, EFAULT, EMFILE, ENOBUFS, ENOMEM, ENOTSOCK + // We match on _ because max open file errors fall under ErrorKind::Uncategorized + _ => { + match io_error.raw_os_error() { + Some(libc::EMFILE) | Some(libc::ENFILE) => { + tracing::error!( + "reached the max open file limit, cannot accept any new connection" + ); + MAX_FILE_HANDLES_WARN.store(true, Ordering::SeqCst); + tokio::time::sleep(Duration::from_millis(1)).await; + } + _ => {} + } + } + } +} + pub(super) fn serve_router_on_listen_addr( mut listener: Listener, - address: ListenAddr, router: axum::Router, - main_graphql_port: bool, - http_config: Http, + opt_max_headers: Option, + opt_max_buf_size: Option, all_connections_stopped_sender: mpsc::Sender<()>, ) -> (impl Future, oneshot::Sender<()>) { let (shutdown_sender, shutdown_receiver) = oneshot::channel::<()>(); - - let meter = 
meter_provider().meter("apollo/router"); - let total_session_count_instrument = meter - .u64_observable_gauge("apollo_router_session_count_total") - .with_description("Number of currently connected clients") - .with_callback(move |gauge| { - gauge.observe( - TOTAL_SESSION_COUNT.load(Ordering::Relaxed), - &[KeyValue::new("listener", address.to_string())], - ); - }) - .init(); - // this server reproduces most of hyper::server::Server's behaviour // we select over the stop_listen_receiver channel and the listener's // accept future. If the channel received something or the sender @@ -264,20 +299,9 @@ pub(super) fn serve_router_on_listen_addr( MAX_FILE_HANDLES_WARN.store(false, Ordering::SeqCst); } - // The session count instrument must be kept alive as long as any - // request is in flight. So its lifetime is not related to the server - // itself. The simplest way to do this is to hold onto a reference for - // the duration of every request. - let session_count_instrument = total_session_count_instrument.clone(); - // We only want to count sessions if we are the main graphql port. 
- let session_count_guard = main_graphql_port.then(TotalSessionCountGuard::start); - - let mut http_config = http_config.clone(); tokio::task::spawn(async move { // this sender must be moved into the session to track that it is still running let _connection_stop_signal = connection_stop_signal; - let _session_count_instrument = session_count_instrument; - let _session_count_guard = session_count_guard; match res { NetworkStream::Tcp(stream) => { @@ -293,8 +317,26 @@ pub(super) fn serve_router_on_listen_addr( .expect( "this should not fail unless the socket is invalid", ); + let tokio_stream = TokioIo::new(stream); + let hyper_service = hyper::service::service_fn(move |request| { + app.clone().call(request) + }); - let connection = http_config.serve_connection(stream, app); + let mut builder = Builder::new(TokioExecutor::new()); + let mut http_connection = builder.http1(); + let http_config = http_connection + .keep_alive(true) + .timer(TokioTimer::new()) + .header_read_timeout(Duration::from_secs(10)); + if let Some(max_headers) = opt_max_headers { + http_config.max_headers(max_headers); + } + + if let Some(max_buf_size) = opt_max_buf_size { + http_config.max_buf_size(max_buf_size.as_u64() as usize); + } + + let connection = http_config.serve_connection_with_upgrades(tokio_stream, hyper_service); tokio::pin!(connection); tokio::select! 
{ // the connection finished first @@ -320,7 +362,24 @@ pub(super) fn serve_router_on_listen_addr( NetworkStream::Unix(stream) => { let received_first_request = Arc::new(AtomicBool::new(false)); let app = IdleConnectionChecker::new(received_first_request.clone(), app); - let connection = http_config.serve_connection(stream, app); + let tokio_stream = TokioIo::new(stream); + let hyper_service = hyper::service::service_fn(move |request| { + app.clone().call(request) + }); + let mut builder = Builder::new(TokioExecutor::new()); + let mut http_connection = builder.http1(); + let http_config = http_connection + .keep_alive(true) + .timer(TokioTimer::new()) + .header_read_timeout(Duration::from_secs(10)); + if let Some(max_headers) = opt_max_headers { + http_config.max_headers(max_headers); + } + + if let Some(max_buf_size) = opt_max_buf_size { + http_config.max_buf_size(max_buf_size.as_u64() as usize); + } + let connection = http_config.serve_connection_with_upgrades(tokio_stream, hyper_service); tokio::pin!(connection); tokio::select! 
{ @@ -353,12 +412,29 @@ pub(super) fn serve_router_on_listen_addr( "this should not fail unless the socket is invalid", ); - let protocol = stream.get_ref().1.alpn_protocol(); - let http2 = protocol == Some(&b"h2"[..]); + let mut builder = Builder::new(TokioExecutor::new()); + if stream.get_ref().1.alpn_protocol() == Some(&b"h2"[..]) { + builder = builder.http2_only(); + } + + let tokio_stream = TokioIo::new(stream); + let hyper_service = hyper::service::service_fn(move |request| { + app.clone().call(request) + }); + let mut http_connection = builder.http1(); + let http_config = http_connection + .keep_alive(true) + .timer(TokioTimer::new()) + .header_read_timeout(Duration::from_secs(10)); + if let Some(max_headers) = opt_max_headers { + http_config.max_headers(max_headers); + } + if let Some(max_buf_size) = opt_max_buf_size { + http_config.max_buf_size(max_buf_size.as_u64() as usize); + } let connection = http_config - .http2_only(http2) - .serve_connection(stream, app); + .serve_connection_with_upgrades(tokio_stream, hyper_service); tokio::pin!(connection); tokio::select! { @@ -384,73 +460,7 @@ pub(super) fn serve_router_on_listen_addr( } }); } - - Err(e) => match e.kind() { - // this is already handled by moi and tokio - //std::io::ErrorKind::WouldBlock => todo!(), - - // should be treated as EAGAIN - // https://man7.org/linux/man-pages/man2/accept.2.html - // Linux accept() (and accept4()) passes already-pending network - // errors on the new socket as an error code from accept(). This - // behavior differs from other BSD socket implementations. For - // reliable operation the application should detect the network - // errors defined for the protocol after accept() and treat them - // like EAGAIN by retrying. In the case of TCP/IP, these are - // ENETDOWN, EPROTO, ENOPROTOOPT, EHOSTDOWN, ENONET, EHOSTUNREACH, - // EOPNOTSUPP, and ENETUNREACH. 
- // - // those errors are not supported though: needs the unstable io_error_more feature - // std::io::ErrorKind::NetworkDown => todo!(), - // std::io::ErrorKind::HostUnreachable => todo!(), - // std::io::ErrorKind::NetworkUnreachable => todo!(), - - //ECONNABORTED - std::io::ErrorKind::ConnectionAborted| - //EINTR - std::io::ErrorKind::Interrupted| - // EINVAL - std::io::ErrorKind::InvalidInput| - std::io::ErrorKind::PermissionDenied | - std::io::ErrorKind::TimedOut | - std::io::ErrorKind::ConnectionReset| - std::io::ErrorKind::NotConnected => { - // the socket was invalid (maybe timedout waiting in accept queue, or was closed) - // we should ignore that and get to the next one - continue; - } - - // ignored errors, these should not happen with accept() - std::io::ErrorKind::NotFound | - std::io::ErrorKind::AddrInUse | - std::io::ErrorKind::AddrNotAvailable | - std::io::ErrorKind::BrokenPipe| - std::io::ErrorKind::AlreadyExists | - std::io::ErrorKind::InvalidData | - std::io::ErrorKind::WriteZero | - std::io::ErrorKind::Unsupported | - std::io::ErrorKind::UnexpectedEof | - std::io::ErrorKind::OutOfMemory => { - continue; - } - - // EPROTO, EOPNOTSUPP, EBADF, EFAULT, EMFILE, ENOBUFS, ENOMEM, ENOTSOCK - // We match on _ because max open file errors fall under ErrorKind::Uncategorized - _ => { - match e.raw_os_error() { - Some(libc::EMFILE) | Some(libc::ENFILE) => { - tracing::error!( - "reached the max open file limit, cannot accept any new connection" - ); - MAX_FILE_HANDLES_WARN.store(true, Ordering::SeqCst); - tokio::time::sleep(Duration::from_millis(1)).await; - } - _ => {} - } - continue; - } - - } + Err(e) => process_error(e).await } } } @@ -465,12 +475,13 @@ pub(super) fn serve_router_on_listen_addr( (server, shutdown_sender) } +#[derive(Clone)] struct IdleConnectionChecker { received_request: Arc, inner: S, } -impl IdleConnectionChecker { +impl IdleConnectionChecker { fn new(b: Arc, service: S) -> Self { IdleConnectionChecker { received_request: b, @@ 
-515,6 +526,7 @@ mod tests { use crate::configuration::Sandbox; use crate::configuration::Supergraph; use crate::services::router; + use crate::services::router::body; #[tokio::test] async fn it_makes_sure_same_listenaddrs_are_accepted() { @@ -544,7 +556,9 @@ mod tests { let endpoint = service_fn(|req: router::Request| async move { Ok::<_, BoxError>(router::Response { response: http::Response::builder() - .body::("this is a test".to_string().into()) + .body::(body::from_bytes( + "this is a test".to_string(), + )) .unwrap(), context: req.context, }) @@ -583,7 +597,9 @@ mod tests { let endpoint = service_fn(|req: router::Request| async move { Ok::<_, BoxError>(router::Response { response: http::Response::builder() - .body::("this is a test".to_string().into()) + .body::(body::from_bytes( + "this is a test".to_string(), + )) .unwrap(), context: req.context, }) diff --git a/apollo-router/src/axum_factory/tests.rs b/apollo-router/src/axum_factory/tests.rs index fa00789f61..9f7e2582fb 100644 --- a/apollo-router/src/axum_factory/tests.rs +++ b/apollo-router/src/axum_factory/tests.rs @@ -6,11 +6,11 @@ use std::str::FromStr; use std::sync::atomic::AtomicU32; use std::sync::atomic::Ordering; use std::sync::Arc; +use std::task::Poll; use std::time::Duration; use async_compression::tokio::write::GzipDecoder; use async_compression::tokio::write::GzipEncoder; -use axum::body::BoxBody; use futures::future::BoxFuture; use futures::stream; use futures::stream::poll_fn; @@ -22,10 +22,14 @@ use http::header::CONTENT_TYPE; use http::header::{self}; use http::HeaderMap; use http::HeaderValue; -use http_body::Body; +#[cfg(unix)] +use http_body_util::BodyExt; +use hyper::rt::ReadBufCursor; +use hyper_util::rt::TokioIo; use mime::APPLICATION_JSON; use mockall::mock; use multimap::MultiMap; +use pin_project_lite::pin_project; use reqwest::header::ACCEPT; use reqwest::header::ACCESS_CONTROL_ALLOW_HEADERS; use reqwest::header::ACCESS_CONTROL_ALLOW_METHODS; @@ -42,7 +46,9 @@ use 
serde_json::json; use test_log::test; use tokio::io::AsyncRead; use tokio::io::AsyncReadExt; +use tokio::io::AsyncWrite; use tokio::io::AsyncWriteExt; +use tokio::io::ReadBuf; use tokio::sync::mpsc; use tokio_util::io::StreamReader; use tower::service_fn; @@ -61,7 +67,6 @@ use crate::graphql; use crate::http_server_factory::HttpServerFactory; use crate::http_server_factory::HttpServerHandle; use crate::json_ext::Path; -use crate::metrics::FutureMetricsExt as _; use crate::plugin::test::MockSubgraph; use crate::query_planner::QueryPlannerService; use crate::router_factory::create_plugins; @@ -513,16 +518,20 @@ async fn it_compress_response_body() -> Result<(), ApolloRouterError> { Ok(()) } -#[tokio::test] -async fn it_decompress_request_body() -> Result<(), ApolloRouterError> { - let original_body = json!({ "query": "query { me { name } }" }); +async fn gzip(json: serde_json::Value) -> Vec { let mut encoder = GzipEncoder::new(Vec::new()); encoder - .write_all(original_body.to_string().as_bytes()) + .write_all(json.to_string().as_bytes()) .await .unwrap(); encoder.shutdown().await.unwrap(); - let compressed_body = encoder.into_inner(); + encoder.into_inner() +} + +#[tokio::test] +async fn it_decompress_request_body() -> Result<(), ApolloRouterError> { + let original_body = json!({ "query": "query { me { name } }" }); + let compressed_body = gzip(original_body).await; let expected_response = graphql::Response::builder() .data(json!({"response": "yayyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyyy"})) // Body must be bigger than 32 to be compressed .build(); @@ -562,6 +571,54 @@ async fn it_decompress_request_body() -> Result<(), ApolloRouterError> { Ok(()) } +#[tokio::test] +async fn unsupported_compression() -> Result<(), ApolloRouterError> { + let original_body = json!({ "query": "query { me { name } }" }); + let compressed_body = gzip(original_body).await; + + let router_service = router::service::empty().await; + let (server, client) = init(router_service).await; + let 
url = format!("{}/", server.graphql_listen_address().as_ref().unwrap()); + + let response = client + .post(url.as_str()) + // Telling the router we used a compression algorithm it can't decompress + .header(CONTENT_ENCODING, HeaderValue::from_static("unsupported")) + .body(compressed_body.clone()) + .send() + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::UNSUPPORTED_MEDIA_TYPE); + + server.shutdown().await?; + Ok(()) +} + +#[tokio::test] +async fn mismatched_compression_header() -> Result<(), ApolloRouterError> { + let original_body = json!({ "query": "query { me { name } }" }); + let compressed_body = gzip(original_body).await; + + let router_service = router::service::empty().await; + let (server, client) = init(router_service).await; + let url = format!("{}/", server.graphql_listen_address().as_ref().unwrap()); + + let response = client + .post(url.as_str()) + // Telling the router we used a different (valid) compression algorithm than the one we actually used + .header(CONTENT_ENCODING, HeaderValue::from_static("br")) + .body(compressed_body.clone()) + .send() + .await + .unwrap(); + + assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR); + + server.shutdown().await?; + Ok(()) +} + #[tokio::test] async fn malformed_request() -> Result<(), ApolloRouterError> { let (server, client) = init(router::service::empty().await).await; @@ -708,7 +765,7 @@ async fn response_with_root_wildcard() -> Result<(), ApolloRouterError> { let conf = Configuration::fake_builder() .supergraph( crate::configuration::Supergraph::fake_builder() - .path(String::from("/*")) + .path(String::from("/{*rest}")) .build(), ) .build() @@ -858,7 +915,7 @@ async fn response_with_custom_prefix_endpoint() -> Result<(), ApolloRouterError> let conf = Configuration::fake_builder() .supergraph( crate::configuration::Supergraph::fake_builder() - .path(String::from("/:my_prefix/graphql")) + .path(String::from("/{my_prefix}/graphql")) .build(), ) .build() @@ -923,7 +980,7 @@ 
async fn response_with_custom_endpoint_wildcard() -> Result<(), ApolloRouterErro let conf = Configuration::fake_builder() .supergraph( crate::configuration::Supergraph::fake_builder() - .path(String::from("/graphql/*")) + .path(String::from("/graphql/{*rest}")) .build(), ) .build() @@ -1918,11 +1975,11 @@ async fn http_deferred_service() -> impl Service< .await .unwrap() .map_err(Into::into) - .map_response(|response: http::Response| { + .map_response(|response: http::Response| { let response = response.map(|body| { // Convert from axum’s BoxBody to AsyncBufRead - let mut body = Box::pin(body); - let stream = poll_fn(move |ctx| body.as_mut().poll_data(ctx)) + let mut body = body.into_data_stream(); + let stream = poll_fn(move |ctx| body.poll_next_unpin(ctx)) .map(|result| result.map_err(|e| io::Error::new(io::ErrorKind::Other, e))); StreamReader::new(stream) }); @@ -2089,24 +2146,125 @@ async fn listening_to_unix_socket() { server.shutdown().await.unwrap(); } +#[cfg(unix)] +pin_project! { + /// Wrapper around [`tokio::net::UnixStream`]. 
+ #[derive(Debug)] + struct UnixStream { + #[pin] + unix_stream: tokio::net::UnixStream, + } +} + +#[cfg(unix)] +impl UnixStream { + async fn connect(path: impl AsRef) -> io::Result { + let unix_stream = tokio::net::UnixStream::connect(path).await?; + Ok(Self { unix_stream }) + } +} + +#[cfg(unix)] +impl AsyncWrite for UnixStream { + fn poll_write( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> Poll> { + self.project().unix_stream.poll_write(cx, buf) + } + + fn poll_flush( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + self.project().unix_stream.poll_flush(cx) + } + + fn poll_shutdown( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + self.project().unix_stream.poll_shutdown(cx) + } + + fn poll_write_vectored( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + bufs: &[io::IoSlice<'_>], + ) -> Poll> { + self.project().unix_stream.poll_write_vectored(cx, bufs) + } + + fn is_write_vectored(&self) -> bool { + self.unix_stream.is_write_vectored() + } +} + +#[cfg(unix)] +impl hyper::rt::Write for UnixStream { + fn poll_write( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &[u8], + ) -> Poll> { + self.project().unix_stream.poll_write(cx, buf) + } + + fn poll_flush( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + self.project().unix_stream.poll_flush(cx) + } + + fn poll_shutdown( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + ) -> Poll> { + self.project().unix_stream.poll_shutdown(cx) + } +} + +#[cfg(unix)] +impl AsyncRead for UnixStream { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: &mut ReadBuf<'_>, + ) -> Poll> { + self.project().unix_stream.poll_read(cx, buf) + } +} + +#[cfg(unix)] +impl hyper::rt::Read for UnixStream { + fn poll_read( + self: Pin<&mut Self>, + cx: &mut std::task::Context<'_>, + buf: ReadBufCursor<'_>, + ) -> Poll> { + let mut t = 
TokioIo::new(self.project().unix_stream); + Pin::new(&mut t).poll_read(cx, buf) + } +} + #[cfg(unix)] async fn send_to_unix_socket(addr: &ListenAddr, method: Method, body: &str) -> String { - use tokio::net::UnixStream; let stream = UnixStream::connect(addr.to_string()).await.unwrap(); - let (mut sender, conn) = hyper::client::conn::handshake(stream).await.unwrap(); + let (mut sender, conn) = hyper::client::conn::http1::handshake(stream).await.unwrap(); tokio::task::spawn(async move { if let Err(err) = conn.await { println!("Connection failed: {:?}", err); } }); - let http_body = hyper::Body::from(body.to_string()); let mut request = http::Request::builder() .method(method.clone()) .header("Host", "localhost:4100") .header("Content-Type", "application/json") .header("Accept", "application/json") - .body(http_body) + .body(body.to_string()) .unwrap(); if method == Method::GET { *request.uri_mut() = body.parse().unwrap(); @@ -2359,95 +2517,3 @@ async fn test_supergraph_timeout() { }) ); } - -/// There are two session count gauges: -/// - apollo_router_session_count_total, the number of open client connections -/// - apollo_router_session_count_active, the number of in-flight HTTP requests -/// -/// To test them, we use two hyper clients. Each client has its own connection pool, so by manually -/// sending requests and completing responses, and creating and dropping clients, we can control -/// the amount of open connections and the amount of in-flight requests. -/// -/// XXX(@goto-bus-stop): this only tests the `session_count_total` metric right now. The -/// `session_count_active` metric is reported from inside an axum router, so its lifetime is -/// actually a little shorter than the full request/response cycle, in a way that is not easy to -/// test from the outside. To test it we could use a custom inner service (passed to -/// `init_with_config`) where we can control the progress the inner service makes. 
-/// For now, I've tested the `session_count_active` metric manually and confirmed its value makes -/// sense... -#[tokio::test] -async fn it_reports_session_count_metric() { - let configuration = Configuration::fake_builder().build().unwrap(); - - async { - let (server, _client) = init_with_config( - router::service::empty().await, - Arc::new(configuration), - MultiMap::new(), - ) - .await - .unwrap(); - - let url = server - .graphql_listen_address() - .as_ref() - .unwrap() - .to_string(); - - let make_request = || { - http::Request::builder() - .uri(&url) - .body(hyper::Body::from(r#"{ "query": "{ me }" }"#)) - .unwrap() - }; - - let client = hyper::Client::new(); - // Create a second client that does not reuse the same connection pool. - let second_client = hyper::Client::new(); - - let first_response = client.request(make_request()).await.unwrap(); - - assert_gauge!( - "apollo_router_session_count_total", - 1, - "listener" = url.clone() - ); - - let second_response = second_client.request(make_request()).await.unwrap(); - - // Both requests are in-flight - assert_gauge!( - "apollo_router_session_count_total", - 2, - "listener" = url.clone() - ); - - _ = hyper::body::to_bytes(first_response.into_body()).await; - - // Connection is still open in the pool even though the request is complete. - assert_gauge!( - "apollo_router_session_count_total", - 2, - "listener" = url.clone() - ); - - _ = hyper::body::to_bytes(second_response.into_body()).await; - - drop(client); - drop(second_client); - - // XXX(@goto-bus-stop): Not ideal, but we would probably have to drop down to very - // low-level hyper primitives to control the shutdown of connections to the required - // extent. 100ms is a long time so I hope it's not flaky. 
- tokio::time::sleep(Duration::from_millis(100)).await; - - // All connections are closed - assert_gauge!( - "apollo_router_session_count_total", - 0, - "listener" = url.clone() - ); - } - .with_metrics() - .await; -} diff --git a/apollo-router/src/axum_factory/utils.rs b/apollo-router/src/axum_factory/utils.rs index 1e208fdb00..754d17f12e 100644 --- a/apollo-router/src/axum_factory/utils.rs +++ b/apollo-router/src/axum_factory/utils.rs @@ -26,7 +26,7 @@ impl MakeSpan for PropagatingMakeSpan { // Before we make the span we need to attach span info that may have come in from the request. let context = global::get_text_map_propagator(|propagator| { - propagator.extract(&opentelemetry_http::HeaderExtractor(request.headers())) + propagator.extract(&crate::otel_compat::HeaderExtractor(request.headers())) }); let use_legacy_request_span = matches!(self.span_mode, SpanMode::Deprecated); @@ -62,6 +62,7 @@ impl MakeSpan for PropagatingMakeSpan { } } +#[derive(Clone)] pub(crate) struct InjectConnectionInfo { inner: S, connection_info: ConnectionInfo, diff --git a/apollo-router/src/batching.rs b/apollo-router/src/batching.rs index 0901497fdc..7e967275f2 100644 --- a/apollo-router/src/batching.rs +++ b/apollo-router/src/batching.rs @@ -26,7 +26,7 @@ use crate::plugins::telemetry::otel::span_ext::OpenTelemetrySpanExt; use crate::query_planner::fetch::QueryHash; use crate::services::http::HttpClientServiceFactory; use crate::services::process_batches; -use crate::services::router::body::get_body_bytes; +use crate::services::router; use crate::services::router::body::RouterBody; use crate::services::subgraph::SubgraphRequestId; use crate::services::SubgraphRequest; @@ -449,7 +449,7 @@ pub(crate) async fn assemble_batch( let (requests, gql_requests): (Vec<_>, Vec<_>) = request_pairs.into_iter().unzip(); // Construct the actual byte body of the batched request - let bytes = get_body_bytes(serde_json::to_string(&gql_requests)?).await?; + let bytes = 
router::body::into_bytes(serde_json::to_string(&gql_requests)?).await?; // Retain the various contexts for later use let contexts = requests @@ -470,7 +470,7 @@ pub(crate) async fn assemble_batch( let (parts, _) = first_request.into_parts(); // Generate the final request and pass it up - let request = http::Request::from_parts(parts, RouterBody::from(bytes)); + let request = http::Request::from_parts(parts, router::body::from_bytes(bytes)); Ok((operation_name, contexts, request, txs)) } @@ -496,6 +496,7 @@ mod tests { use crate::query_planner::fetch::QueryHash; use crate::services::http::HttpClientServiceFactory; use crate::services::router; + use crate::services::router::body; use crate::services::subgraph; use crate::services::subgraph::SubgraphRequestId; use crate::services::SubgraphRequest; @@ -553,7 +554,8 @@ mod tests { // We should see the aggregation of all of the requests let actual: Vec = serde_json::from_str( - std::str::from_utf8(&request.into_body().to_bytes().await.unwrap()).unwrap(), + std::str::from_utf8(&router::body::into_bytes(request.into_body()).await.unwrap()) + .unwrap(), ) .unwrap(); @@ -853,7 +855,7 @@ mod tests { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&request).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&request).unwrap())) .unwrap(), }; diff --git a/apollo-router/src/cache/redis.rs b/apollo-router/src/cache/redis.rs index c8d7b10d07..288e2d16db 100644 --- a/apollo-router/src/cache/redis.rs +++ b/apollo-router/src/cache/redis.rs @@ -160,7 +160,10 @@ impl RedisCacheStorage { if let Some(tls) = config.tls.as_ref() { let tls_cert_store = tls.create_certificate_store().transpose()?; let client_cert_config = tls.client_authentication.as_ref(); - let tls_client_config = generate_tls_client_config(tls_cert_store, client_cert_config)?; + let tls_client_config = generate_tls_client_config( + tls_cert_store, + client_cert_config.map(|arc| 
arc.as_ref()), + )?; let connector = tokio_rustls::TlsConnector::from(Arc::new(tls_client_config)); client_config.tls = Some(TlsConfig { diff --git a/apollo-router/src/cache/storage.rs b/apollo-router/src/cache/storage.rs index d7e3079441..786ae657d3 100644 --- a/apollo-router/src/cache/storage.rs +++ b/apollo-router/src/cache/storage.rs @@ -8,9 +8,9 @@ use std::sync::Arc; use lru::LruCache; use opentelemetry::metrics::MeterProvider; +use opentelemetry::KeyValue; use opentelemetry_api::metrics::ObservableGauge; use opentelemetry_api::metrics::Unit; -use opentelemetry_api::KeyValue; use serde::de::DeserializeOwned; use serde::Serialize; use tokio::sync::Mutex; diff --git a/apollo-router/src/configuration/connector.rs b/apollo-router/src/configuration/connector.rs new file mode 100644 index 0000000000..fdbacb3387 --- /dev/null +++ b/apollo-router/src/configuration/connector.rs @@ -0,0 +1,16 @@ +use std::collections::HashMap; + +use schemars::JsonSchema; +use serde::Deserialize; +use serde::Serialize; + +#[derive(Clone, Debug, Default, Deserialize, Serialize, JsonSchema)] +#[serde(bound(deserialize = "T: Deserialize<'de>"))] // T does not need to be Default +pub(crate) struct ConnectorConfiguration +where + T: Serialize + JsonSchema, +{ + // Map of subgraph_name.connector_source_name to configuration + #[serde(default)] + pub(crate) sources: HashMap, +} diff --git a/apollo-router/src/configuration/cors.rs b/apollo-router/src/configuration/cors.rs index 313d863214..732e58c4ca 100644 --- a/apollo-router/src/configuration/cors.rs +++ b/apollo-router/src/configuration/cors.rs @@ -4,7 +4,9 @@ use std::str::FromStr; use std::time::Duration; use http::request::Parts; +use http::HeaderName; use http::HeaderValue; +use http::Method; use regex::Regex; use schemars::JsonSchema; use serde::Deserialize; @@ -109,36 +111,24 @@ impl Cors { let allow_headers = if self.allow_headers.is_empty() { cors::AllowHeaders::mirror_request() } else { - 
cors::AllowHeaders::list(self.allow_headers.iter().filter_map(|header| { - header - .parse() - .map_err(|_| tracing::error!("header name '{header}' is not valid")) - .ok() - })) + cors::AllowHeaders::list(parse_values::( + &self.allow_headers, + "allow header name", + )?) }; + let cors = CorsLayer::new() .vary([]) .allow_credentials(self.allow_credentials) .allow_headers(allow_headers) - .expose_headers(cors::ExposeHeaders::list( - self.expose_headers - .unwrap_or_default() - .iter() - .filter_map(|header| { - header - .parse() - .map_err(|_| tracing::error!("header name '{header}' is not valid")) - .ok() - }), - )) - .allow_methods(cors::AllowMethods::list(self.methods.iter().filter_map( - |method| { - method - .parse() - .map_err(|_| tracing::error!("method '{method}' is not valid")) - .ok() - }, - ))); + .expose_headers(cors::ExposeHeaders::list(parse_values::( + &self.expose_headers.unwrap_or_default(), + "expose header name", + )?)) + .allow_methods(cors::AllowMethods::list(parse_values::( + &self.methods, + "method", + )?)); let cors = if let Some(max_age) = self.max_age { cors.max_age(max_age) } else { @@ -148,14 +138,7 @@ impl Cors { if self.allow_any_origin { Ok(cors.allow_origin(cors::Any)) } else if let Some(match_origins) = self.match_origins { - let regexes = match_origins - .into_iter() - .filter_map(|regex| { - Regex::from_str(regex.as_str()) - .map_err(|_| tracing::error!("origin regex '{regex}' is not valid")) - .ok() - }) - .collect::>(); + let regexes: Vec = parse_values(&match_origins, "match origin regex")?; Ok(cors.allow_origin(cors::AllowOrigin::predicate( move |origin: &HeaderValue, _: &Parts| { @@ -169,14 +152,10 @@ impl Cors { }, ))) } else { - Ok(cors.allow_origin(cors::AllowOrigin::list( - self.origins.into_iter().filter_map(|origin| { - origin - .parse() - .map_err(|_| tracing::error!("origin '{origin}' is not valid")) - .ok() - }), - ))) + Ok(cors.allow_origin(cors::AllowOrigin::list(parse_values( + &self.origins, + "origin", + )?))) } 
} @@ -214,3 +193,97 @@ impl Cors { Ok(()) } } + +fn parse_values(values_to_parse: &[String], error_description: &str) -> Result, String> +where + T: FromStr, + ::Err: std::fmt::Display, +{ + let mut errors = Vec::new(); + let mut values = Vec::new(); + for val in values_to_parse { + match val + .parse::() + .map_err(|err| format!("{error_description} '{val}' is not valid: {err}")) + { + Ok(val) => values.push(val), + Err(err) => errors.push(err), + } + } + + if errors.is_empty() { + Ok(values) + } else { + Err(errors.join(", ")) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_bad_allow_headers_cors_configuration() { + let cors = Cors::builder() + .allow_headers(vec![String::from("bad\nname")]) + .build(); + let layer = cors.into_layer(); + assert!(layer.is_err()); + + assert_eq!( + layer.unwrap_err(), + String::from("allow header name 'bad\nname' is not valid: invalid HTTP header name") + ); + } + + #[test] + fn test_bad_allow_methods_cors_configuration() { + let cors = Cors::builder() + .methods(vec![String::from("bad\nmethod")]) + .build(); + let layer = cors.into_layer(); + assert!(layer.is_err()); + + assert_eq!( + layer.unwrap_err(), + String::from("method 'bad\nmethod' is not valid: invalid HTTP method") + ); + } + + #[test] + fn test_bad_origins_cors_configuration() { + let cors = Cors::builder() + .origins(vec![String::from("bad\norigin")]) + .build(); + let layer = cors.into_layer(); + assert!(layer.is_err()); + + assert_eq!( + layer.unwrap_err(), + String::from("origin 'bad\norigin' is not valid: failed to parse header value") + ); + } + + #[test] + fn test_bad_match_origins_cors_configuration() { + let cors = Cors::builder() + .match_origins(vec![String::from("[")]) + .build(); + let layer = cors.into_layer(); + assert!(layer.is_err()); + + assert_eq!( + layer.unwrap_err(), + String::from("match origin regex '[' is not valid: regex parse error:\n [\n ^\nerror: unclosed character class") + ); + } + + #[test] + fn 
test_good_cors_configuration() { + let cors = Cors::builder() + .allow_headers(vec![String::from("good-name")]) + .build(); + let layer = cors.into_layer(); + assert!(layer.is_ok()); + } +} diff --git a/apollo-router/src/configuration/expansion.rs b/apollo-router/src/configuration/expansion.rs index c865b0d878..7c602e70be 100644 --- a/apollo-router/src/configuration/expansion.rs +++ b/apollo-router/src/configuration/expansion.rs @@ -168,6 +168,11 @@ fn dev_mode_defaults() -> Vec { .value(false) .value_type(ValueType::Bool) .build(), + Override::builder() + .config_path("preview_connectors.debug_extensions") + .value(true) + .value_type(ValueType::Bool) + .build(), ] } diff --git a/apollo-router/src/configuration/metrics.rs b/apollo-router/src/configuration/metrics.rs index f285c18b93..9ec1a0a605 100644 --- a/apollo-router/src/configuration/metrics.rs +++ b/apollo-router/src/configuration/metrics.rs @@ -2,9 +2,9 @@ use std::collections::HashMap; use std::str::FromStr; use jsonpath_rust::JsonPathInst; +use opentelemetry::metrics::Meter; use opentelemetry::metrics::MeterProvider; -use opentelemetry_api::metrics::Meter; -use opentelemetry_api::KeyValue; +use opentelemetry::KeyValue; use paste::paste; use serde_json::Value; @@ -340,6 +340,8 @@ impl InstrumentData { "$..events.supergraph", opt.events.subgraph, "$..events.subgraph", + opt.events.connector, + "$..events.connector", opt.instruments, "$..instruments", opt.instruments.router, @@ -348,6 +350,8 @@ impl InstrumentData { "$..instruments.supergraph", opt.instruments.subgraph, "$..instruments.subgraph", + opt.instruments.connector, + "$..instruments.connector", opt.instruments.graphql, "$..instruments.graphql", opt.instruments.default_attribute_requirement_level, @@ -363,9 +367,7 @@ impl InstrumentData { opt.spans.subgraph, "$..spans.subgraph", opt.spans.supergraph, - "$..spans.supergraph", - opt.logging.experimental_when_header, - "$..logging.experimental_when_header" + "$..spans.supergraph" ); 
populate_config_instrument!( @@ -400,6 +402,23 @@ impl InstrumentData { "$.metrics_reference_mode" ); + populate_config_instrument!( + apollo.router.config.connectors, + "$.preview_connectors", + opt.debug_extensions, + "$[?(@.debug_extensions == true)]", + opt.expose_sources_in_context, + "$[?(@.expose_sources_in_context == true)]", + opt.max_requests_per_operation_per_source, + "$[?(@.max_requests_per_operation_per_source)]", + opt.subgraph.config, + "$[?(@.subgraphs..['$config'])]", + opt.source.override_url, + "$[?(@.subgraphs..sources..override_url)]", + opt.source.max_requests_per_operation, + "$[?(@.subgraphs..sources..max_requests_per_operation)]" + ); + // We need to update the entry we just made because the selected strategy is a named object in the config. // The jsonpath spec doesn't include a utility for getting the keys out of an object, so we do it manually. if let Some((_, demand_control_attributes)) = diff --git a/apollo-router/src/configuration/mod.rs b/apollo-router/src/configuration/mod.rs index 3b83d9416a..d143f1bd89 100644 --- a/apollo-router/src/configuration/mod.rs +++ b/apollo-router/src/configuration/mod.rs @@ -20,12 +20,9 @@ pub(crate) use persisted_queries::PersistedQueriesPrewarmQueryPlanCache; #[cfg(test)] pub(crate) use persisted_queries::PersistedQueriesSafelist; use regex::Regex; -use rustls::Certificate; -use rustls::PrivateKey; +use rustls::pki_types::CertificateDer; +use rustls::pki_types::PrivateKeyDer; use rustls::ServerConfig; -use rustls_pemfile::certs; -use rustls_pemfile::read_one; -use rustls_pemfile::Item; use schemars::gen::SchemaGenerator; use schemars::schema::ObjectValidation; use schemars::schema::Schema; @@ -56,6 +53,7 @@ use crate::plugins::subscription::APOLLO_SUBSCRIPTION_PLUGIN_NAME; use crate::uplink::UplinkConfig; use crate::ApolloRouterError; +pub(crate) mod connector; pub(crate) mod cors; pub(crate) mod expansion; mod experimental; @@ -446,14 +444,6 @@ impl Configuration { impl Configuration { pub(crate) fn 
validate(self) -> Result { - #[cfg(not(feature = "hyper_header_limits"))] - if self.limits.http1_max_request_headers.is_some() { - return Err(ConfigurationError::InvalidConfiguration { - message: "'limits.http1_max_request_headers' requires 'hyper_header_limits' feature", - error: "enable 'hyper_header_limits' feature in order to use 'limits.http1_max_request_headers'".to_string(), - }); - } - // Sandbox and Homepage cannot be both enabled if self.sandbox.enabled && self.homepage.enabled { return Err(ConfigurationError::InvalidConfiguration { @@ -744,7 +734,7 @@ impl Supergraph { path = format!("{}router_extra_path", self.path); } else if SUPERGRAPH_ENDPOINT_REGEX.is_match(&self.path) { let new_path = SUPERGRAPH_ENDPOINT_REGEX - .replace(&self.path, "${first_path}${sub_path}:supergraph_route"); + .replace(&self.path, "${first_path}${sub_path}{supergraph_route}"); path = new_path.to_string(); } @@ -1024,26 +1014,26 @@ pub(crate) struct Tls { /// TLS server configuration /// /// this will affect the GraphQL endpoint and any other endpoint targeting the same listen address - pub(crate) supergraph: Option, + pub(crate) supergraph: Option>, pub(crate) subgraph: SubgraphConfiguration, } /// Configuration options pertaining to the supergraph server component. 
-#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] +#[derive(Debug, Deserialize, Serialize, JsonSchema)] #[serde(deny_unknown_fields)] pub(crate) struct TlsSupergraph { /// server certificate in PEM format #[serde(deserialize_with = "deserialize_certificate", skip_serializing)] #[schemars(with = "String")] - pub(crate) certificate: Certificate, + pub(crate) certificate: CertificateDer<'static>, /// server key in PEM format #[serde(deserialize_with = "deserialize_key", skip_serializing)] #[schemars(with = "String")] - pub(crate) key: PrivateKey, + pub(crate) key: PrivateKeyDer<'static>, /// list of certificate authorities in PEM format #[serde(deserialize_with = "deserialize_certificate_chain", skip_serializing)] #[schemars(with = "String")] - pub(crate) certificate_chain: Vec, + pub(crate) certificate_chain: Vec>, } impl TlsSupergraph { @@ -1052,9 +1042,8 @@ impl TlsSupergraph { certificates.extend(self.certificate_chain.iter().cloned()); let mut config = ServerConfig::builder() - .with_safe_defaults() .with_no_client_auth() - .with_single_cert(certificates, self.key.clone()) + .with_single_cert(certificates, self.key.clone_key()) .map_err(ApolloRouterError::Rustls)?; config.alpn_protocols = vec![b"h2".to_vec(), b"http/1.1".to_vec()]; @@ -1062,7 +1051,7 @@ impl TlsSupergraph { } } -fn deserialize_certificate<'de, D>(deserializer: D) -> Result +fn deserialize_certificate<'de, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'de>, { @@ -1081,7 +1070,9 @@ where }) } -fn deserialize_certificate_chain<'de, D>(deserializer: D) -> Result, D::Error> +fn deserialize_certificate_chain<'de, D>( + deserializer: D, +) -> Result>, D::Error> where D: Deserializer<'de>, { @@ -1090,7 +1081,7 @@ where load_certs(&data).map_err(serde::de::Error::custom) } -fn deserialize_key<'de, D>(deserializer: D) -> Result +fn deserialize_key<'de, D>(deserializer: D) -> Result, D::Error> where D: Deserializer<'de>, { @@ -1099,20 +1090,24 @@ where 
load_key(&data).map_err(serde::de::Error::custom) } -pub(crate) fn load_certs(data: &str) -> io::Result> { - certs(&mut BufReader::new(data.as_bytes())) - .map_err(|_| io::Error::new(io::ErrorKind::InvalidInput, "invalid cert")) - .map(|mut certs| certs.drain(..).map(Certificate).collect()) +#[derive(thiserror::Error, Debug)] +#[error("could not load TLS certificate: {0}")] +struct LoadCertError(std::io::Error); + +pub(crate) fn load_certs(data: &str) -> io::Result>> { + rustls_pemfile::certs(&mut BufReader::new(data.as_bytes())) + .collect::, _>>() + .map_err(|error| io::Error::new(io::ErrorKind::InvalidInput, LoadCertError(error))) } -pub(crate) fn load_key(data: &str) -> io::Result { +pub(crate) fn load_key(data: &str) -> io::Result> { let mut reader = BufReader::new(data.as_bytes()); - let mut key_iterator = iter::from_fn(|| read_one(&mut reader).transpose()); + let mut key_iterator = iter::from_fn(|| rustls_pemfile::read_one(&mut reader).transpose()); let private_key = match key_iterator.next() { - Some(Ok(Item::RSAKey(key))) => PrivateKey(key), - Some(Ok(Item::PKCS8Key(key))) => PrivateKey(key), - Some(Ok(Item::ECKey(key))) => PrivateKey(key), + Some(Ok(rustls_pemfile::Item::Pkcs1Key(key))) => PrivateKeyDer::from(key), + Some(Ok(rustls_pemfile::Item::Pkcs8Key(key))) => PrivateKeyDer::from(key), + Some(Ok(rustls_pemfile::Item::Sec1Key(key))) => PrivateKeyDer::from(key), Some(Err(e)) => { return Err(io::Error::new( io::ErrorKind::InvalidInput, @@ -1150,7 +1145,7 @@ pub(crate) struct TlsClient { /// list of certificate authorities in PEM format pub(crate) certificate_authorities: Option, /// client certificate authentication - pub(crate) client_authentication: Option, + pub(crate) client_authentication: Option>, } #[buildstructor::buildstructor] @@ -1158,7 +1153,7 @@ impl TlsClient { #[builder] pub(crate) fn new( certificate_authorities: Option, - client_authentication: Option, + client_authentication: Option>, ) -> Self { Self { certificate_authorities, @@ 
-1174,17 +1169,17 @@ impl Default for TlsClient { } /// TLS client authentication -#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] +#[derive(Debug, Deserialize, Serialize, JsonSchema)] #[serde(deny_unknown_fields)] pub(crate) struct TlsClientAuth { /// list of certificates in PEM format #[serde(deserialize_with = "deserialize_certificate_chain", skip_serializing)] #[schemars(with = "String")] - pub(crate) certificate_chain: Vec, + pub(crate) certificate_chain: Vec>, /// key in PEM format #[serde(deserialize_with = "deserialize_key", skip_serializing)] #[schemars(with = "String")] - pub(crate) key: PrivateKey, + pub(crate) key: PrivateKeyDer<'static>, } /// Configuration options pertaining to the sandbox page. diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__expansion__test__dev_mode.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__expansion__test__dev_mode.snap index 6839dcb274..9d800c88e3 100644 --- a/apollo-router/src/configuration/snapshots/apollo_router__configuration__expansion__test__dev_mode.snap +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__expansion__test__dev_mode.snap @@ -9,6 +9,8 @@ include_subgraph_errors: all: true plugins: experimental.expose_query_plan: true +preview_connectors: + debug_extensions: true sandbox: enabled: true supergraph: @@ -18,4 +20,3 @@ telemetry: tracing: experimental_response_trace_id: enabled: true - diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@connectors.router.yaml.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@connectors.router.yaml.snap new file mode 100644 index 0000000000..9b24638487 --- /dev/null +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@connectors.router.yaml.snap @@ -0,0 +1,15 @@ +--- +source: apollo-router/src/configuration/metrics.rs 
+expression: "&metrics.non_zero()" +--- +- name: apollo.router.config.connectors + data: + datapoints: + - value: 1 + attributes: + opt.debug_extensions: true + opt.expose_sources_in_context: true + opt.max_requests_per_operation_per_source: true + opt.source.max_requests_per_operation: true + opt.source.override_url: true + opt.subgraph.config: true diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@telemetry.router.yaml.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@telemetry.router.yaml.snap index 50a0d132d5..2ddcf7c9aa 100644 --- a/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@telemetry.router.yaml.snap +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__metrics__test__metrics@telemetry.router.yaml.snap @@ -8,16 +8,17 @@ expression: "&metrics.non_zero()" - value: 1 attributes: opt.events: true + opt.events.connector: true opt.events.router: true opt.events.subgraph: true opt.events.supergraph: true opt.instruments: true + opt.instruments.connector: true opt.instruments.default_attribute_requirement_level: false opt.instruments.graphql: true opt.instruments.router: true opt.instruments.subgraph: true opt.instruments.supergraph: true - opt.logging.experimental_when_header: true opt.metrics.otlp: true opt.metrics.prometheus: true opt.spans: true diff --git a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap index fcf47cdb92..1ad8e2ef17 100644 --- a/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap +++ b/apollo-router/src/configuration/snapshots/apollo_router__configuration__tests__schema_generation.snap @@ -255,41 +255,6 @@ expression: "&schema" } ] }, - 
"AttributesForwardConf": { - "additionalProperties": false, - "description": "Configuration to add custom attributes/labels on metrics to subgraphs", - "properties": { - "context": { - "description": "Configuration to forward values from the context to custom attributes/labels in metrics", - "items": { - "$ref": "#/definitions/ContextForward", - "description": "#/definitions/ContextForward" - }, - "type": "array" - }, - "errors": { - "$ref": "#/definitions/ErrorsForward", - "description": "#/definitions/ErrorsForward" - }, - "request": { - "$ref": "#/definitions/Forward", - "description": "#/definitions/Forward" - }, - "response": { - "$ref": "#/definitions/Forward", - "description": "#/definitions/Forward" - }, - "static": { - "description": "Configuration to insert custom attributes/labels in metrics", - "items": { - "$ref": "#/definitions/Insert2", - "description": "#/definitions/Insert2" - }, - "type": "array" - } - }, - "type": "object" - }, "AuthConfig": { "oneOf": [ { @@ -385,30 +350,6 @@ expression: "&schema" } ] }, - "BodyForward": { - "additionalProperties": false, - "description": "Configuration to forward body values in metric attributes/labels", - "properties": { - "default": { - "$ref": "#/definitions/AttributeValue", - "description": "#/definitions/AttributeValue", - "nullable": true - }, - "name": { - "description": "The name of the attribute", - "type": "string" - }, - "path": { - "description": "The path in the body", - "type": "string" - } - }, - "required": [ - "name", - "path" - ], - "type": "object" - }, "CSRFConfig": { "additionalProperties": false, "description": "CSRF Configuration.", @@ -610,6 +551,143 @@ expression: "&schema" } ] }, + "Condition_for_ConnectorSelector": { + "oneOf": [ + { + "additionalProperties": false, + "description": "A condition to check a selection against a value.", + "properties": { + "eq": { + "items": { + "$ref": "#/definitions/SelectorOrValue_for_ConnectorSelector", + "description": 
"#/definitions/SelectorOrValue_for_ConnectorSelector" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + } + }, + "required": [ + "eq" + ], + "type": "object" + }, + { + "additionalProperties": false, + "description": "The first selection must be greater than the second selection.", + "properties": { + "gt": { + "items": { + "$ref": "#/definitions/SelectorOrValue_for_ConnectorSelector", + "description": "#/definitions/SelectorOrValue_for_ConnectorSelector" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + } + }, + "required": [ + "gt" + ], + "type": "object" + }, + { + "additionalProperties": false, + "description": "The first selection must be less than the second selection.", + "properties": { + "lt": { + "items": { + "$ref": "#/definitions/SelectorOrValue_for_ConnectorSelector", + "description": "#/definitions/SelectorOrValue_for_ConnectorSelector" + }, + "maxItems": 2, + "minItems": 2, + "type": "array" + } + }, + "required": [ + "lt" + ], + "type": "object" + }, + { + "additionalProperties": false, + "description": "A condition to check a selection against a selector.", + "properties": { + "exists": { + "$ref": "#/definitions/ConnectorSelector", + "description": "#/definitions/ConnectorSelector" + } + }, + "required": [ + "exists" + ], + "type": "object" + }, + { + "additionalProperties": false, + "description": "All sub-conditions must be true.", + "properties": { + "all": { + "items": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + }, + "type": "array" + } + }, + "required": [ + "all" + ], + "type": "object" + }, + { + "additionalProperties": false, + "description": "At least one sub-conditions must be true.", + "properties": { + "any": { + "items": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + }, + "type": "array" + } + }, + "required": [ + "any" + ], + "type": "object" + }, + { + 
"additionalProperties": false, + "description": "The sub-condition must not be true", + "properties": { + "not": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + } + }, + "required": [ + "not" + ], + "type": "object" + }, + { + "description": "Static true condition", + "enum": [ + "true" + ], + "type": "string" + }, + { + "description": "Static false condition", + "enum": [ + "false" + ], + "type": "string" + } + ] + }, "Condition_for_GraphQLSelector": { "oneOf": [ { @@ -1175,6 +1253,11 @@ expression: "&schema" "additionalProperties": false, "description": "Authentication", "properties": { + "connector": { + "$ref": "#/definitions/ConnectorConfiguration_for_AuthConfig", + "description": "#/definitions/ConnectorConfiguration_for_AuthConfig", + "nullable": true + }, "router": { "$ref": "#/definitions/RouterConf", "description": "#/definitions/RouterConf", @@ -1629,6 +1712,7 @@ expression: "&schema" "$ref": "#/definitions/AuthConfig", "description": "#/definitions/AuthConfig" }, + "default": {}, "description": "Create a configuration that will apply only to a specific subgraph.", "type": "object" } @@ -1761,10 +1845,6 @@ expression: "&schema" "$ref": "#/definitions/Protocol", "description": "#/definitions/Protocol" }, - "experimental_otlp_tracing_sampler": { - "$ref": "#/definitions/SamplerOption", - "description": "#/definitions/SamplerOption" - }, "field_level_instrumentation_sampler": { "$ref": "#/definitions/SamplerOption", "description": "#/definitions/SamplerOption" @@ -1773,6 +1853,10 @@ expression: "&schema" "$ref": "#/definitions/ApolloMetricsReferenceMode", "description": "#/definitions/ApolloMetricsReferenceMode" }, + "otlp_tracing_sampler": { + "$ref": "#/definitions/SamplerOption", + "description": "#/definitions/SamplerOption" + }, "send_headers": { "$ref": "#/definitions/ForwardHeaders", "description": "#/definitions/ForwardHeaders" @@ -1788,66 +1872,314 @@ expression: 
"&schema" }, "type": "object" }, - "ContextForward": { + "ConnectorAttributes": { "additionalProperties": false, - "description": "Configuration to forward context values in metric attributes/labels", "properties": { - "default": { - "$ref": "#/definitions/AttributeValue", - "description": "#/definitions/AttributeValue", + "connector.http.method": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", "nullable": true }, - "named": { - "description": "The name of the value in the context", - "type": "string" + "connector.source.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true }, - "rename": { - "description": "The optional output name", - "nullable": true, - "type": "string" + "connector.url.template": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "subgraph.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true } }, - "required": [ - "named" - ], "type": "object" }, - "Cors": { + "ConnectorConfiguration_for_AuthConfig": { + "properties": { + "sources": { + "additionalProperties": { + "$ref": "#/definitions/AuthConfig", + "description": "#/definitions/AuthConfig" + }, + "default": {}, + "type": "object" + } + }, + "type": "object" + }, + "ConnectorEventsConfig": { "additionalProperties": false, - "description": "Cross origin request configuration.", "properties": { - "allow_any_origin": { - "default": false, - "description": "Set to true to allow any origin.\n\nDefaults to false Having this set to true is the only way to allow Origin: null.", - "type": "boolean" + "error": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" }, - "allow_credentials": { - "default": false, - "description": "Set to true to add 
the `Access-Control-Allow-Credentials` header.", - "type": "boolean" + "request": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" }, - "allow_headers": { - "default": [], - "description": "The headers to allow.\n\nIf this value is not set, the router will mirror client's `Access-Control-Request-Headers`.\n\nNote that if you set headers here, you also want to have a look at your `CSRF` plugins configuration, and make sure you either: - accept `x-apollo-operation-name` AND / OR `apollo-require-preflight` - defined `csrf` required headers in your yml configuration, as shown in the `examples/cors-and-csrf/custom-headers.router.yaml` files.", - "items": { - "type": "string" - }, - "type": "array" + "response": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" + } + }, + "type": "object" + }, + "ConnectorInstrumentsConfig": { + "additionalProperties": false, + "properties": { + "http.client.request.body.size": { + "$ref": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" }, - "expose_headers": { - "default": null, - "description": "Which response headers should be made available to scripts running in the browser, in response to a cross-origin request.", - "items": { - "type": "string" - }, - "nullable": true, - "type": "array" + "http.client.request.duration": { + "$ref": 
"#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" }, - "match_origins": { - "default": null, - "description": "`Regex`es you want to match the origins against to determine if they're allowed. Defaults to an empty list. Note that `origins` will be evaluated before `match_origins`", - "items": { - "type": "string" + "http.client.response.body.size": { + "$ref": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + } + }, + "type": "object" + }, + "ConnectorSelector": { + "anyOf": [ + { + "additionalProperties": false, + "properties": { + "subgraph_name": { + "description": "The subgraph name", + "type": "boolean" + } + }, + "required": [ + "subgraph_name" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_source": { + "$ref": "#/definitions/ConnectorSource", + "description": "#/definitions/ConnectorSource" + } + }, + "required": [ + "connector_source" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_http_request_header": { + "description": "The name of a connector HTTP 
request header.", + "type": "string" + }, + "default": { + "description": "Optional default value.", + "nullable": true, + "type": "string" + } + }, + "required": [ + "connector_http_request_header" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_http_response_header": { + "description": "The name of a connector HTTP response header.", + "type": "string" + }, + "default": { + "description": "Optional default value.", + "nullable": true, + "type": "string" + } + }, + "required": [ + "connector_http_response_header" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_http_response_status": { + "$ref": "#/definitions/ResponseStatus", + "description": "#/definitions/ResponseStatus" + } + }, + "required": [ + "connector_http_response_status" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_http_method": { + "description": "The connector HTTP method.", + "type": "boolean" + } + }, + "required": [ + "connector_http_method" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "connector_url_template": { + "description": "The connector URL template.", + "type": "boolean" + } + }, + "required": [ + "connector_url_template" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "static": { + "$ref": "#/definitions/AttributeValue", + "description": "#/definitions/AttributeValue" + } + }, + "required": [ + "static" + ], + "type": "object" + }, + { + "additionalProperties": false, + "properties": { + "error": { + "$ref": "#/definitions/ErrorRepr", + "description": "#/definitions/ErrorRepr" + } + }, + "required": [ + "error" + ], + "type": "object" + } + ] + }, + "ConnectorSource": { + "oneOf": [ + { + "description": "The name of the connector source.", + "enum": [ + "name" + ], + "type": "string" + } + ] + }, + "ConnectorSpans": { + "additionalProperties": false, + 
"properties": { + "attributes": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::conditional::Conditional", + "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::conditional::Conditional" + } + }, + "type": "object" + }, + "ConnectorValue": { + "anyOf": [ + { + "$ref": "#/definitions/Standard", + "description": "#/definitions/Standard" + }, + { + "$ref": "#/definitions/ConnectorSelector", + "description": "#/definitions/ConnectorSelector" + } + ] + }, + "ConnectorsConfig": { + "additionalProperties": false, + "properties": { + "debug_extensions": { + "default": false, + "description": "Enables connector debugging information on response extensions if the feature is enabled", + "type": "boolean" + }, + "expose_sources_in_context": { + "default": false, + "description": "When enabled, adds an entry to the context for use in coprocessors ```json { \"context\": { \"entries\": { \"apollo_connectors::sources_in_query_plan\": [ { \"subgraph_name\": \"subgraph\", \"source_name\": \"source\" } ] } } } ```", + "type": "boolean" + }, + "max_requests_per_operation_per_source": { + "default": null, + "description": "The maximum number of requests for a connector source", + "format": "uint", + "minimum": 0.0, + "nullable": true, + "type": "integer" + }, + "subgraphs": { + "additionalProperties": { + "$ref": "#/definitions/SubgraphConnectorConfiguration", + "description": "#/definitions/SubgraphConnectorConfiguration" + }, + "default": {}, + "description": "A map of subgraph name to connectors config for that subgraph", + "type": "object" + } + }, + "type": "object" + }, + "Cors": { + "additionalProperties": false, + "description": "Cross origin request configuration.", + "properties": { + "allow_any_origin": { + 
"default": false, + "description": "Set to true to allow any origin.\n\nDefaults to false Having this set to true is the only way to allow Origin: null.", + "type": "boolean" + }, + "allow_credentials": { + "default": false, + "description": "Set to true to add the `Access-Control-Allow-Credentials` header.", + "type": "boolean" + }, + "allow_headers": { + "default": [], + "description": "The headers to allow.\n\nIf this value is not set, the router will mirror client's `Access-Control-Request-Headers`.\n\nNote that if you set headers here, you also want to have a look at your `CSRF` plugins configuration, and make sure you either: - accept `x-apollo-operation-name` AND / OR `apollo-require-preflight` - defined `csrf` required headers in your yml configuration, as shown in the `examples/cors-and-csrf/custom-headers.router.yaml` files.", + "items": { + "type": "string" + }, + "type": "array" + }, + "expose_headers": { + "default": null, + "description": "Which response headers should be made available to scripts running in the browser, in response to a cross-origin request.", + "items": { + "type": "string" + }, + "nullable": true, + "type": "array" + }, + "match_origins": { + "default": null, + "description": "`Regex`es you want to match the origins against to determine if they're allowed. Defaults to an empty list. 
Note that `origins` will be evaluated before `match_origins`", + "items": { + "type": "string" }, "nullable": true, "type": "array" @@ -2083,6 +2415,29 @@ expression: "&schema" } ] }, + "DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector": { + "anyOf": [ + { + "type": "null" + }, + { + "type": "boolean" + }, + { + "additionalProperties": false, + "properties": { + "attributes": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + } + }, + "required": [ + "attributes" + ], + "type": "object" + } + ] + }, "DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::graphql::attributes::GraphQLAttributes_apollo_router::plugins::telemetry::config_new::graphql::selectors::GraphQLSelector": { "anyOf": [ { @@ -2303,26 +2658,6 @@ expression: "&schema" }, "type": "object" }, - "ErrorsForward": { - "additionalProperties": false, - "properties": { - "extensions": { - "description": "Forward extensions values as custom attributes/labels in metrics", - "items": { - "$ref": "#/definitions/BodyForward", - "description": "#/definitions/BodyForward" - }, - "type": "array" - }, - "include_messages": { - "default": null, - "description": "Will include the error message in a \"message\" attribute", - "nullable": true, - "type": "boolean" - } - }, - "type": "object" - }, "EventLevel": { "enum": [ "info", @@ -2365,6 +2700,37 @@ expression: "&schema" } ] }, + 
"Event_for_ConnectorAttributes_and_ConnectorSelector": { + "description": "An event that can be logged as part of a trace. The event has an implicit `type` attribute that matches the name of the event in the yaml and a message that can be used to provide additional information.", + "properties": { + "attributes": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + }, + "condition": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + }, + "level": { + "$ref": "#/definitions/EventLevel", + "description": "#/definitions/EventLevel" + }, + "message": { + "description": "The event message.", + "type": "string" + }, + "on": { + "$ref": "#/definitions/EventOn", + "description": "#/definitions/EventOn" + } + }, + "required": [ + "level", + "message", + "on" + ], + "type": "object" + }, "Event_for_RouterAttributes_and_RouterSelector": { "description": "An event that can be logged as part of a trace. 
The event has an implicit `type` attribute that matches the name of the event in the yaml and a message that can be used to provide additional information.", "properties": { @@ -2494,6 +2860,10 @@ expression: "&schema" "additionalProperties": false, "description": "Events are", "properties": { + "connector": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::events::ConnectorEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event", + "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::events::ConnectorEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event" + }, "router": { "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::events::RouterEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event", "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::events::RouterEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event" @@ -2700,29 +3070,6 @@ expression: "&schema" "description": "Forbid mutations configuration", "type": "boolean" }, - "Forward": { - "additionalProperties": false, - "description": "Configuration to forward from headers/body", - "properties": { - "body": { - "description": "Forward body values as custom attributes/labels in metrics", - "items": { - "$ref": "#/definitions/BodyForward", - "description": "#/definitions/BodyForward" - }, - "type": "array" - }, - "header": { - "description": "Forward header values as custom attributes/labels in metrics", - "items": { - "$ref": "#/definitions/HeaderForward", - "description": "#/definitions/HeaderForward" - }, - "type": "array" - } - }, - "type": "object" - }, "ForwardHeaders": { "description": "Forward headers", "oneOf": [ @@ -3009,133 +3356,28 @@ expression: "&schema" "default": {}, "description": "gRPC metadata", "type": "object" - } - }, - 
"type": "object" - }, - "Header": { - "additionalProperties": false, - "description": "Insert a header", - "properties": { - "name": { - "description": "The name of the header", - "type": "string" - }, - "value": { - "description": "The value for the header", - "type": "string" - } - }, - "required": [ - "name", - "value" - ], - "type": "object" - }, - "HeaderForward": { - "anyOf": [ - { - "additionalProperties": false, - "description": "Match via header name", - "properties": { - "default": { - "$ref": "#/definitions/AttributeValue", - "description": "#/definitions/AttributeValue", - "nullable": true - }, - "named": { - "description": "The name of the header", - "type": "string" - }, - "rename": { - "description": "The optional output name", - "nullable": true, - "type": "string" - } - }, - "required": [ - "named" - ], - "type": "object" - }, - { - "additionalProperties": false, - "description": "Match via rgex", - "properties": { - "matching": { - "description": "Using a regex on the header name", - "type": "string" - } - }, - "required": [ - "matching" - ], - "type": "object" - } - ], - "description": "Configuration to forward header values in metric labels" - }, - "HeaderLoggingCondition": { - "anyOf": [ - { - "additionalProperties": false, - "description": "Match header value given a regex to display logs", - "properties": { - "body": { - "default": false, - "description": "Display request/response body (default: false)", - "type": "boolean" - }, - "headers": { - "default": false, - "description": "Display request/response headers (default: false)", - "type": "boolean" - }, - "match": { - "description": "Regex to match the header value", - "type": "string" - }, - "name": { - "description": "Header name", - "type": "string" - } - }, - "required": [ - "match", - "name" - ], - "type": "object" + } + }, + "type": "object" + }, + "Header": { + "additionalProperties": false, + "description": "Insert a header", + "properties": { + "name": { + "description": "The name 
of the header", + "type": "string" }, - { - "additionalProperties": false, - "description": "Match header value given a value to display logs", - "properties": { - "body": { - "default": false, - "description": "Display request/response body (default: false)", - "type": "boolean" - }, - "headers": { - "default": false, - "description": "Display request/response headers (default: false)", - "type": "boolean" - }, - "name": { - "description": "Header name", - "type": "string" - }, - "value": { - "description": "Header value", - "type": "string" - } - }, - "required": [ - "name", - "value" - ], - "type": "object" + "value": { + "description": "The value for the header", + "type": "string" } - ] + }, + "required": [ + "name", + "value" + ], + "type": "object" }, "HeadersLocation": { "additionalProperties": false, @@ -3281,25 +3523,6 @@ expression: "&schema" ], "description": "Insert header" }, - "Insert2": { - "additionalProperties": false, - "description": "Configuration to insert custom attributes/labels in metrics", - "properties": { - "name": { - "description": "The name of the attribute to insert", - "type": "string" - }, - "value": { - "$ref": "#/definitions/AttributeValue", - "description": "#/definitions/AttributeValue" - } - }, - "required": [ - "name", - "value" - ], - "type": "object" - }, "InsertFromBody": { "additionalProperties": false, "description": "Insert header with a value coming from body", @@ -3416,6 +3639,42 @@ expression: "&schema" ], "type": "object" }, + "Instrument_for_ConnectorAttributes_and_ConnectorSelector_and_ConnectorValue": { + "additionalProperties": false, + "properties": { + "attributes": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": 
"#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + }, + "condition": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + }, + "description": { + "description": "The description of the instrument.", + "type": "string" + }, + "type": { + "$ref": "#/definitions/InstrumentType", + "description": "#/definitions/InstrumentType" + }, + "unit": { + "description": "The units of the instrument, e.g. \"ms\", \"bytes\", \"requests\".", + "type": "string" + }, + "value": { + "$ref": "#/definitions/ConnectorValue", + "description": "#/definitions/ConnectorValue" + } + }, + "required": [ + "description", + "type", + "unit", + "value" + ], + "type": "object" + }, "Instrument_for_GraphQLAttributes_and_GraphQLSelector_and_GraphQLValue": { "additionalProperties": false, "properties": { @@ -3586,6 +3845,10 @@ expression: "&schema" "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::cache::CacheInstrumentsConfig_apollo_router::plugins::telemetry::config_new::instruments::Instrument", "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::cache::CacheInstrumentsConfig_apollo_router::plugins::telemetry::config_new::instruments::Instrument" }, + "connector": { + "$ref": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::instruments::ConnectorInstrumentsConfig_apollo_router::plugins::telemetry::config_new::instruments::Instrument", + "description": "#/definitions/extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::instruments::ConnectorInstrumentsConfig_apollo_router::plugins::telemetry::config_new::instruments::Instrument" + }, "default_requirement_level": { "$ref": 
"#/definitions/DefaultAttributeRequirementLevel", "description": "#/definitions/DefaultAttributeRequirementLevel" @@ -3756,14 +4019,6 @@ expression: "&schema" "$ref": "#/definitions/LoggingCommon", "description": "#/definitions/LoggingCommon" }, - "experimental_when_header": { - "description": "Log configuration to log request and response for subgraphs and supergraph Note that this will be removed when events are implemented.", - "items": { - "$ref": "#/definitions/HeaderLoggingCondition", - "description": "#/definitions/HeaderLoggingCondition" - }, - "type": "array" - }, "stdout": { "$ref": "#/definitions/StdOut", "description": "#/definitions/StdOut" @@ -3913,28 +4168,9 @@ expression: "&schema" }, "type": "object" }, - "MetricsAttributesConf": { - "additionalProperties": false, - "description": "Configuration to add custom attributes/labels on metrics", - "properties": { - "subgraph": { - "$ref": "#/definitions/SubgraphAttributesConf", - "description": "#/definitions/SubgraphAttributesConf" - }, - "supergraph": { - "$ref": "#/definitions/AttributesForwardConf", - "description": "#/definitions/AttributesForwardConf" - } - }, - "type": "object" - }, "MetricsCommon": { "additionalProperties": false, "properties": { - "attributes": { - "$ref": "#/definitions/MetricsAttributesConf", - "description": "#/definitions/MetricsAttributesConf" - }, "buckets": { "default": [ 0.001, @@ -5270,6 +5506,18 @@ expression: "&schema" }, "type": "object" }, + "SelectorOrValue_for_ConnectorSelector": { + "anyOf": [ + { + "$ref": "#/definitions/AttributeValue", + "description": "#/definitions/AttributeValue" + }, + { + "$ref": "#/definitions/ConnectorSelector", + "description": "#/definitions/ConnectorSelector" + } + ] + }, "SelectorOrValue_for_GraphQLSelector": { "anyOf": [ { @@ -5370,6 +5618,28 @@ expression: "&schema" } ] }, + "SourceConfiguration": { + "additionalProperties": false, + "description": "Configuration for a `@source` directive", + "properties": { + 
"max_requests_per_operation": { + "default": null, + "description": "The maximum number of requests for this source", + "format": "uint", + "minimum": 0.0, + "nullable": true, + "type": "integer" + }, + "override_url": { + "default": null, + "description": "Override the `@source(http: {baseURL:})`", + "format": "uri", + "nullable": true, + "type": "string" + } + }, + "type": "object" + }, "SpanMode": { "description": "Span mode to create new or deprecated spans", "oneOf": [ @@ -5392,6 +5662,10 @@ expression: "&schema" "Spans": { "additionalProperties": false, "properties": { + "connector": { + "$ref": "#/definitions/ConnectorSpans", + "description": "#/definitions/ConnectorSpans" + }, "default_attribute_requirement_level": { "$ref": "#/definitions/DefaultAttributeRequirementLevel", "description": "#/definitions/DefaultAttributeRequirementLevel" @@ -5441,6 +5715,31 @@ expression: "&schema" } ] }, + "StandardEventConfig_for_ConnectorSelector": { + "anyOf": [ + { + "$ref": "#/definitions/EventLevel", + "description": "#/definitions/EventLevel" + }, + { + "properties": { + "condition": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + }, + "level": { + "$ref": "#/definitions/EventLevel", + "description": "#/definitions/EventLevel" + } + }, + "required": [ + "condition", + "level" + ], + "type": "object" + } + ] + }, "StandardEventConfig_for_RouterSelector": { "anyOf": [ { @@ -5685,25 +5984,6 @@ expression: "&schema" }, "type": "object" }, - "SubgraphAttributesConf": { - "additionalProperties": false, - "description": "Configuration to add custom attributes/labels on metrics to subgraphs", - "properties": { - "all": { - "$ref": "#/definitions/AttributesForwardConf", - "description": "#/definitions/AttributesForwardConf" - }, - "subgraphs": { - "additionalProperties": { - "$ref": "#/definitions/AttributesForwardConf", - "description": "#/definitions/AttributesForwardConf" - }, - "description": 
"Attributes per subgraph", - "type": "object" - } - }, - "type": "object" - }, "SubgraphConfiguration_for_CommonBatchingConfig": { "description": "Configuration options pertaining to the subgraph server component.", "properties": { @@ -5780,6 +6060,28 @@ expression: "&schema" }, "type": "object" }, + "SubgraphConnectorConfiguration": { + "additionalProperties": false, + "description": "Configuration for a connector subgraph", + "properties": { + "$config": { + "additionalProperties": true, + "default": {}, + "description": "Other values that can be used by connectors via `{$config.}`", + "type": "object" + }, + "sources": { + "additionalProperties": { + "$ref": "#/definitions/SourceConfiguration", + "description": "#/definitions/SourceConfiguration" + }, + "default": {}, + "description": "A map of `@source(name:)` to configuration for that source", + "type": "object" + } + }, + "type": "object" + }, "SubgraphErrorConfig": { "additionalProperties": false, "properties": { @@ -7381,6 +7683,22 @@ expression: "&schema" ], "type": "string" }, + "conditional_attribute_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector": { + "anyOf": [ + { + "$ref": "#/definitions/ConnectorSelector", + "description": "#/definitions/ConnectorSelector" + }, + { + "properties": { + "condition": { + "$ref": "#/definitions/Condition_for_ConnectorSelector", + "description": "#/definitions/Condition_for_ConnectorSelector" + } + } + } + ] + }, "conditional_attribute_apollo_router::plugins::telemetry::config_new::selectors::RouterSelector": { "anyOf": [ { @@ -7866,6 +8184,106 @@ expression: "&schema" }, "type": "object" }, + "extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::conditional::Conditional": { + "additionalProperties": { + "$ref": 
"#/definitions/conditional_attribute_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/conditional_attribute_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + }, + "properties": { + "connector.http.method": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "connector.source.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "connector.url.template": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "subgraph.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + } + }, + "type": "object" + }, + "extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector": { + "additionalProperties": { + "$ref": "#/definitions/ConnectorSelector", + "description": "#/definitions/ConnectorSelector" + }, + "properties": { + "connector.http.method": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "connector.source.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "connector.url.template": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + }, + "subgraph.name": { + "$ref": "#/definitions/StandardAttribute", + "description": "#/definitions/StandardAttribute", + "nullable": true + } + }, + "type": "object" + }, + 
"extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::events::ConnectorEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event": { + "additionalProperties": { + "$ref": "#/definitions/Event_for_ConnectorAttributes_and_ConnectorSelector", + "description": "#/definitions/Event_for_ConnectorAttributes_and_ConnectorSelector" + }, + "properties": { + "error": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" + }, + "request": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" + }, + "response": { + "$ref": "#/definitions/StandardEventConfig_for_ConnectorSelector", + "description": "#/definitions/StandardEventConfig_for_ConnectorSelector" + } + }, + "type": "object" + }, + "extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::instruments::ConnectorInstrumentsConfig_apollo_router::plugins::telemetry::config_new::instruments::Instrument": { + "additionalProperties": { + "$ref": "#/definitions/Instrument_for_ConnectorAttributes_and_ConnectorSelector_and_ConnectorValue", + "description": "#/definitions/Instrument_for_ConnectorAttributes_and_ConnectorSelector_and_ConnectorValue" + }, + "properties": { + "http.client.request.body.size": { + "$ref": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + }, + "http.client.request.duration": { + "$ref": 
"#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + }, + "http.client.response.body.size": { + "$ref": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector", + "description": "#/definitions/DefaultedStandardInstrument_for_extendable_attribute_apollo_router::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes_apollo_router::plugins::telemetry::config_new::connector::selectors::ConnectorSelector" + } + }, + "type": "object" + }, "extendable_attribute_apollo_router::plugins::telemetry::config_new::events::RouterEventsConfig_apollo_router::plugins::telemetry::config_new::events::Event": { "additionalProperties": { "$ref": "#/definitions/Event_for_RouterAttributes_and_RouterSelector", @@ -8323,6 +8741,10 @@ expression: "&schema" "$ref": "#/definitions/Plugins", "description": "#/definitions/Plugins" }, + "preview_connectors": { + "$ref": "#/definitions/ConnectorsConfig", + "description": "#/definitions/ConnectorsConfig" + }, "preview_entity_cache": { "$ref": "#/definitions/Config7", "description": "#/definitions/Config7" diff --git a/apollo-router/src/configuration/testdata/metrics/connectors.router.yaml b/apollo-router/src/configuration/testdata/metrics/connectors.router.yaml new file mode 100644 index 0000000000..c1c4748118 --- /dev/null +++ 
b/apollo-router/src/configuration/testdata/metrics/connectors.router.yaml @@ -0,0 +1,12 @@ +preview_connectors: + debug_extensions: true + expose_sources_in_context: true + max_requests_per_operation_per_source: 100 + subgraphs: + subgraph_name: + $config: + name_of_the_variable: variable_value + sources: + source_name: + max_requests_per_operation: 50 + override_url: 'http://localhost' diff --git a/apollo-router/src/configuration/testdata/metrics/telemetry.router.yaml b/apollo-router/src/configuration/testdata/metrics/telemetry.router.yaml index b4f9a19dd7..5dcc804e9d 100644 --- a/apollo-router/src/configuration/testdata/metrics/telemetry.router.yaml +++ b/apollo-router/src/configuration/testdata/metrics/telemetry.router.yaml @@ -20,16 +20,6 @@ telemetry: enabled: true agent: endpoint: default - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true instrumentation: spans: mode: spec_compliant @@ -99,6 +89,20 @@ telemetry: subgraph_response_data: "$.products[*].price1" attributes: subgraph.name: true + connector: + acme.user.not.found: + value: unit + type: counter + unit: count + description: "Count of 404 responses from the user API" + condition: + all: + - eq: + - 404 + - connector_http_response_status: code + - eq: + - "user_api" + - connector_source: name graphql: list.length: true field.execution: true @@ -174,4 +178,20 @@ telemetry: attributes: subgraph.name: true response_status: - subgraph_response_status: code \ No newline at end of file + subgraph_response_status: code + connector: + # Standard events + request: off + response: info + error: error + + # Custom events + connector.response: + message: "Connector response" + level: info + on: response + attributes: + connector.http.method: true + connector.url.template: true + response_status: + 
connector_http_response_status: code \ No newline at end of file diff --git a/apollo-router/src/configuration/testdata/migrations/telemetry_router_to_supergraph.router.yaml b/apollo-router/src/configuration/testdata/migrations/telemetry_router_to_supergraph.router.yaml deleted file mode 100644 index 0e44d606a6..0000000000 --- a/apollo-router/src/configuration/testdata/migrations/telemetry_router_to_supergraph.router.yaml +++ /dev/null @@ -1,8 +0,0 @@ -telemetry: - metrics: - common: - attributes: - router: - request: - header: - - named: "fd" diff --git a/apollo-router/src/configuration/tests.rs b/apollo-router/src/configuration/tests.rs index 4a93e496cc..5c28681e52 100644 --- a/apollo-router/src/configuration/tests.rs +++ b/apollo-router/src/configuration/tests.rs @@ -413,11 +413,6 @@ fn validate_project_config_files() { }; for yaml in yamls { - #[cfg(not(feature = "hyper_header_limits"))] - if yaml.contains("http1_max_request_headers") { - continue; - } - if let Err(e) = validate_yaml_configuration( &yaml, Expansion::default().unwrap(), @@ -682,7 +677,7 @@ fn test_configuration_validate_and_sanitize() { .unwrap() .validate() .unwrap(); - assert_eq!(&conf.supergraph.sanitized_path(), "/g:supergraph_route"); + assert_eq!(&conf.supergraph.sanitized_path(), "/g{supergraph_route}"); let conf = Configuration::builder() .supergraph(Supergraph::builder().path("/graphql/g*").build()) @@ -692,16 +687,16 @@ fn test_configuration_validate_and_sanitize() { .unwrap(); assert_eq!( &conf.supergraph.sanitized_path(), - "/graphql/g:supergraph_route" + "/graphql/g{supergraph_route}" ); let conf = Configuration::builder() - .supergraph(Supergraph::builder().path("/*").build()) + .supergraph(Supergraph::builder().path("/{*rest}").build()) .build() .unwrap() .validate() .unwrap(); - assert_eq!(&conf.supergraph.sanitized_path(), "/*router_extra_path"); + assert_eq!(&conf.supergraph.sanitized_path(), "/{*rest}"); let conf = Configuration::builder() 
.supergraph(Supergraph::builder().path("/test").build()) @@ -719,6 +714,9 @@ fn test_configuration_validate_and_sanitize() { #[test] fn load_tls() { + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + let mut cert_path = PathBuf::from(env!("CARGO_MANIFEST_DIR")); cert_path.push("src"); cert_path.push("configuration"); diff --git a/apollo-router/src/error.rs b/apollo-router/src/error.rs index d8000b99cf..7b550fc79c 100644 --- a/apollo-router/src/error.rs +++ b/apollo-router/src/error.rs @@ -495,6 +495,10 @@ pub(crate) enum SchemaError { /// Api error(s): {0} #[from(ignore)] Api(String), + + /// Connector error(s): {0} + #[from(ignore)] + Connector(FederationError), } /// Collection of schema validation errors. diff --git a/apollo-router/src/executable.rs b/apollo-router/src/executable.rs index afe70ff552..c246096340 100644 --- a/apollo-router/src/executable.rs +++ b/apollo-router/src/executable.rs @@ -63,6 +63,8 @@ pub(crate) static mut DHAT_AD_HOC_PROFILER: OnceCell = OnceCell: pub(crate) const APOLLO_ROUTER_DEV_ENV: &str = "APOLLO_ROUTER_DEV"; pub(crate) const APOLLO_TELEMETRY_DISABLED: &str = "APOLLO_TELEMETRY_DISABLED"; +const INITIAL_UPLINK_POLL_INTERVAL: Duration = Duration::from_secs(10); + // Note: Constructor/Destructor functions may not play nicely with tracing, since they run after // main completes, so don't use tracing, use println!() and eprintln!().. #[cfg(feature = "dhat-heap")] @@ -235,10 +237,6 @@ pub struct Opt { // Should be a Vec when https://github.com/clap-rs/clap/discussions/3796 is solved apollo_uplink_endpoints: Option, - /// The time between polls to Apollo uplink. Minimum 10s. - #[clap(long, default_value = "10s", value_parser = humantime::parse_duration, env)] - apollo_uplink_poll_interval: Duration, - /// Disable sending anonymous usage information to Apollo. 
#[clap(long, env = APOLLO_TELEMETRY_DISABLED, value_parser = FalseyValueParser::new())] anonymous_telemetry_disabled: bool, @@ -294,7 +292,7 @@ impl Opt { .as_ref() .map(|endpoints| Self::parse_endpoints(endpoints)) .transpose()?, - poll_interval: self.apollo_uplink_poll_interval, + poll_interval: INITIAL_UPLINK_POLL_INTERVAL, timeout: self.apollo_uplink_timeout, }) } @@ -404,6 +402,8 @@ impl Executable { println!("{}", std::env!("CARGO_PKG_VERSION")); return Ok(()); } + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); copy_args_to_env(); @@ -473,9 +473,6 @@ impl Executable { license: Option, mut opt: Opt, ) -> Result<()> { - if opt.apollo_uplink_poll_interval < Duration::from_secs(10) { - return Err(anyhow!("apollo-uplink-poll-interval must be at least 10s")); - } let current_directory = std::env::current_dir()?; // Enable hot reload when dev mode is enabled opt.hot_reload = opt.hot_reload || opt.dev; @@ -554,7 +551,7 @@ impl Executable { SchemaSource::URLs { urls: supergraph_urls.clone(), watch: opt.hot_reload, - period: opt.apollo_uplink_poll_interval + period: INITIAL_UPLINK_POLL_INTERVAL, } } (_, None, None, _, Some(apollo_key_path)) => { diff --git a/apollo-router/src/http_ext.rs b/apollo-router/src/http_ext.rs index 700cf19177..e01b2bd926 100644 --- a/apollo-router/src/http_ext.rs +++ b/apollo-router/src/http_ext.rs @@ -7,7 +7,6 @@ use std::hash::Hash; use std::ops::Deref; use std::ops::DerefMut; -use axum::body::boxed; use axum::response::IntoResponse; use bytes::Bytes; use http::header; @@ -444,7 +443,7 @@ impl IntoResponse for Response { .headers .insert(header::CONTENT_TYPE, APPLICATION_JSON_HEADER_VALUE.clone()); - axum::response::Response::from_parts(parts, boxed(http_body::Full::new(json_body_bytes))) + axum::response::Response::from_parts(parts, axum::body::Body::from(json_body_bytes)) } } @@ -453,7 +452,7 @@ impl IntoResponse for Response { // todo: chunks? 
let (parts, body) = http::Response::from(self).into_parts(); - axum::response::Response::from_parts(parts, boxed(http_body::Full::new(body))) + axum::response::Response::from_parts(parts, axum::body::Body::from(body)) } } diff --git a/apollo-router/src/http_server_factory.rs b/apollo-router/src/http_server_factory.rs index c66c9da0f0..b5685a236b 100644 --- a/apollo-router/src/http_server_factory.rs +++ b/apollo-router/src/http_server_factory.rs @@ -101,13 +101,12 @@ impl HttpServerHandle { } } + #[cfg(unix)] pub(crate) async fn shutdown(mut self) -> Result<(), ApolloRouterError> { - #[cfg(unix)] let listen_addresses = std::mem::take(&mut self.listen_addresses); let (_main_listener, _extra_listener) = self.wait_for_servers().await?; - #[cfg(unix)] // listen_addresses includes the main graphql_address for listen_address in listen_addresses { if let ListenAddr::UnixSocket(path) = listen_address { @@ -117,6 +116,13 @@ impl HttpServerHandle { Ok(()) } + #[cfg(not(unix))] + pub(crate) async fn shutdown(self) -> Result<(), ApolloRouterError> { + let (_main_listener, _extra_listener) = self.wait_for_servers().await?; + + Ok(()) + } + pub(crate) async fn restart( self, factory: &SF, diff --git a/apollo-router/src/lib.rs b/apollo-router/src/lib.rs index 4d32f99259..66afce95e0 100644 --- a/apollo-router/src/lib.rs +++ b/apollo-router/src/lib.rs @@ -81,6 +81,9 @@ pub mod test_harness; pub mod tracer; mod uplink; +#[doc(hidden)] +pub mod otel_compat; + pub use crate::axum_factory::unsupported_set_axum_router_callback; pub use crate::configuration::Configuration; pub use crate::configuration::ListenAddr; @@ -97,6 +100,10 @@ pub use crate::router::RouterHttpServer; pub use crate::router::SchemaSource; pub use crate::router::ShutdownSource; pub use crate::router_factory::Endpoint; +#[cfg(any(test, feature = "snapshot"))] +pub use crate::test_harness::http_snapshot::standalone::main as snapshot_server; +#[cfg(any(test, feature = "snapshot"))] +pub use 
crate::test_harness::http_snapshot::SnapshotServer; pub use crate::test_harness::make_fake_batch; pub use crate::test_harness::MockedSubgraphs; pub use crate::test_harness::TestHarness; diff --git a/apollo-router/src/metrics/filter.rs b/apollo-router/src/metrics/filter.rs index 6f30335794..46ced76274 100644 --- a/apollo-router/src/metrics/filter.rs +++ b/apollo-router/src/metrics/filter.rs @@ -105,7 +105,7 @@ impl FilterMeterProvider { FilterMeterProvider::builder() .delegate(delegate) .deny( - Regex::new(r"apollo\.router\.(config|entities|instance|operations\.(fetch|request_size|response_size))(\..*|$)") + Regex::new(r"apollo\.router\.(config|entities|instance|operations\.(connectors|fetch|request_size|response_size)|schema\.connectors)(\..*|$)") .expect("regex should have been valid"), ) .build() @@ -289,6 +289,14 @@ mod test { .u64_counter("apollo.router.lifecycle.api_schema") .init() .add(1, &[]); + filtered + .u64_counter("apollo.router.operations.connectors") + .init() + .add(1, &[]); + filtered + .u64_observable_gauge("apollo.router.schema.connectors") + .with_callback(move |observer| observer.observe(1, &[])) + .init(); meter_provider.force_flush(&cx).unwrap(); let metrics: Vec<_> = exporter @@ -315,6 +323,13 @@ mod test { assert!(metrics .iter() .any(|m| m.name == "apollo.router.lifecycle.api_schema")); + + assert!(metrics + .iter() + .any(|m| m.name == "apollo.router.operations.connectors")); + assert!(metrics + .iter() + .any(|m| m.name == "apollo.router.schema.connectors")); } #[tokio::test(flavor = "multi_thread")] @@ -396,6 +411,14 @@ mod test { .u64_counter("apollo.router.entities.test") .init() .add(1, &[]); + filtered + .u64_counter("apollo.router.operations.connectors") + .init() + .add(1, &[]); + filtered + .u64_observable_gauge("apollo.router.schema.connectors") + .with_callback(move |observer| observer.observe(1, &[])) + .init(); meter_provider.force_flush(&cx).unwrap(); let metrics: Vec<_> = exporter @@ -414,5 +437,11 @@ mod test { 
assert!(!metrics .iter() .any(|m| m.name == "apollo.router.entities.test")); + assert!(!metrics + .iter() + .any(|m| m.name == "apollo.router.operations.connectors")); + assert!(!metrics + .iter() + .any(|m| m.name == "apollo.router.schema.connectors")); } } diff --git a/apollo-router/src/metrics/mod.rs b/apollo-router/src/metrics/mod.rs index 0aaec867d5..99439193c7 100644 --- a/apollo-router/src/metrics/mod.rs +++ b/apollo-router/src/metrics/mod.rs @@ -177,23 +177,25 @@ pub(crate) mod test_utils { name: &str, ty: MetricType, value: T, + // Useful for histogram to check the count and not the sum + count: bool, attributes: &[KeyValue], ) -> bool { let attributes = AttributeSet::from(attributes); if let Some(value) = value.to_u64() { - if self.metric_matches(name, &ty, value, &attributes) { + if self.metric_matches(name, &ty, value, count, &attributes) { return true; } } if let Some(value) = value.to_i64() { - if self.metric_matches(name, &ty, value, &attributes) { + if self.metric_matches(name, &ty, value, count, &attributes) { return true; } } if let Some(value) = value.to_f64() { - if self.metric_matches(name, &ty, value, &attributes) { + if self.metric_matches(name, &ty, value, count, &attributes) { return true; } } @@ -206,6 +208,7 @@ pub(crate) mod test_utils { name: &str, ty: &MetricType, value: T, + count: bool, attributes: &AttributeSet, ) -> bool { if let Some(metric) = self.find(name) { @@ -227,9 +230,16 @@ pub(crate) mod test_utils { } else if let Some(histogram) = metric.data.as_any().downcast_ref::>() { if matches!(ty, MetricType::Histogram) { - return histogram.data_points.iter().any(|datapoint| { - datapoint.attributes == *attributes && datapoint.sum == value - }); + if count { + return histogram.data_points.iter().any(|datapoint| { + datapoint.attributes == *attributes + && datapoint.count == value.to_u64().unwrap() + }); + } else { + return histogram.data_points.iter().any(|datapoint| { + datapoint.attributes == *attributes && datapoint.sum == value 
+ }); + } } } } @@ -342,6 +352,8 @@ pub(crate) mod test_utils { pub(crate) value: Option, #[serde(skip_serializing_if = "Option::is_none")] pub(crate) sum: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub(crate) count: Option, pub(crate) attributes: BTreeMap, } @@ -420,6 +432,7 @@ pub(crate) mod test_utils { SerdeMetricDataPoint { value: Some(value.value.clone().into()), sum: None, + count: None, attributes: value .attributes .iter() @@ -454,6 +467,7 @@ pub(crate) mod test_utils { SerdeMetricDataPoint { sum: Some(value.sum.clone().into()), value: None, + count: Some(value.count), attributes: value .attributes .iter() @@ -879,7 +893,7 @@ macro_rules! metric { #[cfg(test)] macro_rules! assert_metric { - ($result:expr, $name:expr, $value:expr, $sum:expr, $attrs:expr) => { + ($result:expr, $name:expr, $value:expr, $sum:expr, $count:expr, $attrs:expr) => { if !$result { let metric = crate::metrics::test_utils::SerdeMetric { name: $name.to_string(), @@ -889,6 +903,7 @@ macro_rules! assert_metric { datapoints: [crate::metrics::test_utils::SerdeMetricDataPoint { value: $value, sum: $sum, + count: $count, attributes: $attrs .iter() .map(|kv: &opentelemetry::KeyValue| { @@ -922,37 +937,37 @@ macro_rules! 
assert_counter { ($($name:ident).+, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let name = stringify!($($name).+); let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(name, crate::metrics::test_utils::MetricType::Counter, $value, attributes); - assert_metric!(result, name, Some($value.into()), None, &attributes); + let result = crate::metrics::collect_metrics().assert(name, crate::metrics::test_utils::MetricType::Counter, $value, false, attributes); + assert_metric!(result, name, Some($value.into()), None, None, &attributes); }; ($($name:ident).+, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let name = stringify!($($name).+); let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(name, crate::metrics::test_utils::MetricType::Counter, $value, attributes); - assert_metric!(result, name, Some($value.into()), None, &attributes); + let result = crate::metrics::collect_metrics().assert(name, crate::metrics::test_utils::MetricType::Counter, $value, false, attributes); + assert_metric!(result, name, Some($value.into()), None, None, &attributes); }; ($name:literal, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, &attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, &attributes); }; ($name:literal, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = 
&[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, &attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, &attributes); }; ($name:literal, $value: expr, $attributes: expr) => { - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, $attributes); - assert_metric!(result, $name, Some($value.into()), None, &$attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, false, $attributes); + assert_metric!(result, $name, Some($value.into()), None, None, &$attributes); }; ($name:literal, $value: expr) => { - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, &[]); - assert_metric!(result, $name, Some($value.into()), None, &[]); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Counter, $value, false, &[]); + assert_metric!(result, $name, Some($value.into()), None, None, &[]); }; } @@ -965,31 +980,31 @@ macro_rules! 
assert_up_down_counter { ($($name:ident).+, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::UpDownCounter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::UpDownCounter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); }; ($($name:ident).+, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::UpDownCounter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::UpDownCounter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); }; ($name:literal, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); }; ($name:literal, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes 
= &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); }; ($name:literal, $value: expr) => { - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, &[]); - assert_metric!(result, $name, Some($value.into()), None, &[]); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::UpDownCounter, $value, false, &[]); + assert_metric!(result, $name, Some($value.into()), None, None, &[]); }; } @@ -1002,31 +1017,64 @@ macro_rules! 
assert_gauge { ($($name:ident).+, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Gauge, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Gauge, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); + }; + + ($($name:ident).+, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { + let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Gauge, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); + }; + + ($name:literal, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { + let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); + }; + + ($name:literal, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { + let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, $value, false, attributes); + assert_metric!(result, $name, Some($value.into()), None, None, attributes); + }; + + ($name:literal, $value: expr) => { + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, 
$value, false, &[]); + assert_metric!(result, $name, Some($value.into()), None, None, &[]); + }; +} + +#[cfg(test)] +macro_rules! assert_histogram_count { + + ($($name:ident).+, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { + let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, true, attributes); + assert_metric!(result, $name, None, Some($value.into()), Some(num_traits::ToPrimitive::to_u64(&$value).expect("count should be convertible to u64")), attributes); }; ($($name:ident).+, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Gauge, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, true, attributes); + assert_metric!(result, $name, None, Some($value.into()), Some(num_traits::ToPrimitive::to_u64(&$value).expect("count should be convertible to u64")), attributes); }; ($name:literal, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, true, attributes); + assert_metric!(result, $name, None, Some($value.into()), Some(num_traits::ToPrimitive::to_u64(&$value).expect("count should 
be convertible to u64")), attributes); }; ($name:literal, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, $value, attributes); - assert_metric!(result, $name, Some($value.into()), None, attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, attributes); + assert_metric!(result, $name, None, Some($value.into()), Some(num_traits::ToPrimitive::to_u64(&$value).expect("count should be convertible to u64")), attributes); }; ($name:literal, $value: expr) => { - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Gauge, $value, &[]); - assert_metric!(result, $name, Some($value.into()), None, &[]); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, true, &[]); + assert_metric!(result, $name, None, Some($value.into()), Some(num_traits::ToPrimitive::to_u64(&$value).expect("count should be convertible to u64")), &[]); }; } @@ -1039,31 +1087,31 @@ macro_rules! 
assert_histogram_sum { ($($name:ident).+, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, attributes); - assert_metric!(result, $name, None, Some($value.into()), attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, false, attributes); + assert_metric!(result, $name, None, Some($value.into()), None, attributes); }; ($($name:ident).+, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, attributes); - assert_metric!(result, $name, None, Some($value.into()), attributes); + let result = crate::metrics::collect_metrics().assert(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, $value, false, attributes); + assert_metric!(result, $name, None, Some($value.into()), None, attributes); }; ($name:literal, $value: expr, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, attributes); - assert_metric!(result, $name, None, Some($value.into()), attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, false, attributes); + assert_metric!(result, $name, None, Some($value.into()), None, attributes); }; ($name:literal, $value: expr, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = 
&[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, attributes); - assert_metric!(result, $name, None, Some($value.into()), attributes); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, false, attributes); + assert_metric!(result, $name, None, Some($value.into()), None, attributes); }; ($name:literal, $value: expr) => { - let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, &[]); - assert_metric!(result, $name, None, Some($value.into()), &[]); + let result = crate::metrics::collect_metrics().assert($name, crate::metrics::test_utils::MetricType::Histogram, $value, false, &[]); + assert_metric!(result, $name, None, Some($value.into()), None, &[]); }; } @@ -1077,30 +1125,30 @@ macro_rules! assert_histogram_exists { ($($name:ident).+, $value: ty, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(result, $name, None, None, attributes); + assert_metric!(result, $name, None, None, None, attributes); }; ($($name:ident).+, $value: ty, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(result, $name, None, None, attributes); + assert_metric!(result, $name, None, None, None, attributes); }; ($name:literal, $value: ty, $($attr_key:literal = $attr_value:expr),+) => { let 
attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(result, $name, None, None, attributes); + assert_metric!(result, $name, None, None, None, attributes); }; ($name:literal, $value: ty, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(result, $name, None, None, attributes); + assert_metric!(result, $name, None, None, None, attributes); }; ($name:literal, $value: ty) => { let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, &[]); - assert_metric!(result, $name, None, None, &[]); + assert_metric!(result, $name, None, None, None, &[]); }; } @@ -1114,30 +1162,30 @@ macro_rules! 
assert_histogram_not_exists { ($($name:ident).+, $value: ty, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(!result, $name, None, None, attributes); + assert_metric!(!result, $name, None, None, None, attributes); }; ($($name:ident).+, $value: ty, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>(stringify!($($name).+), crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(!result, $name, None, None, attributes); + assert_metric!(!result, $name, None, None, None, attributes); }; ($name:literal, $value: ty, $($attr_key:literal = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new($attr_key, $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(!result, $name, None, None, attributes); + assert_metric!(!result, $name, None, None, None, attributes); }; ($name:literal, $value: ty, $($($attr_key:ident).+ = $attr_value:expr),+) => { let attributes = &[$(opentelemetry::KeyValue::new(stringify!($($attr_key).+), $attr_value)),+]; let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, attributes); - assert_metric!(!result, $name, None, None, attributes); + assert_metric!(!result, $name, None, None, None, attributes); }; ($name:literal, $value: ty) => { let result = crate::metrics::collect_metrics().metric_exists::<$value>($name, crate::metrics::test_utils::MetricType::Histogram, &[]); - assert_metric!(!result, $name, 
None, None, &[]); + assert_metric!(!result, $name, None, None, None, &[]); }; } diff --git a/apollo-router/src/otel_compat.rs b/apollo-router/src/otel_compat.rs new file mode 100644 index 0000000000..9e3f13fca3 --- /dev/null +++ b/apollo-router/src/otel_compat.rs @@ -0,0 +1,38 @@ +//! Facilities for using our old version of opentelemetry with our new version of http/hyper. + +/// A header extractor that works on http 1.x types. +/// +/// The implementation is a straight copy from [opentelemetry_http::HeaderExtractor]. +/// This can be removed after we update otel. +pub struct HeaderExtractor<'a>(pub &'a http::HeaderMap); +impl opentelemetry_api::propagation::Extractor for HeaderExtractor<'_> { + /// Get a value for a key from the HeaderMap. If the value is not valid ASCII, returns None. + fn get(&self, key: &str) -> Option<&str> { + self.0.get(key).and_then(|value| value.to_str().ok()) + } + + /// Collect all the keys from the HeaderMap. + fn keys(&self) -> Vec<&str> { + self.0 + .keys() + .map(|value| value.as_str()) + .collect::>() + } +} + +/// A header injector that works on http 1.x types. +/// +/// The implementation is a straight copy from [opentelemetry_http::HeaderInjector]. +/// This can be removed after we update otel. +pub struct HeaderInjector<'a>(pub &'a mut http::HeaderMap); + +impl opentelemetry_api::propagation::Injector for HeaderInjector<'_> { + /// Set a key and value in the HeaderMap. Does nothing if the key or value are not valid inputs. + fn set(&mut self, key: &str, value: String) { + if let Ok(name) = http::header::HeaderName::from_bytes(key.as_bytes()) { + if let Ok(val) = http::header::HeaderValue::from_str(&value) { + self.0.insert(name, val); + } + } + } +} diff --git a/apollo-router/src/plugin/mod.rs b/apollo-router/src/plugin/mod.rs index f8750892c9..dcf8c6c1ee 100644 --- a/apollo-router/src/plugin/mod.rs +++ b/apollo-router/src/plugin/mod.rs @@ -835,7 +835,7 @@ macro_rules! 
register_plugin { }; }; - ($group: literal, $name: literal, $plugin_type: ident) => { + ($group: literal, $name: expr, $plugin_type: ident) => { // Artificial scope to avoid naming collisions const _: () = { use $crate::_private::once_cell::sync::Lazy; @@ -889,7 +889,7 @@ macro_rules! register_private_plugin { /// Handler represents a [`Plugin`] endpoint. #[derive(Clone)] pub(crate) struct Handler { - service: Buffer, + service: Buffer>::Future>, } impl Handler { diff --git a/apollo-router/src/plugins/authentication/connector.rs b/apollo-router/src/plugins/authentication/connector.rs new file mode 100644 index 0000000000..16c18c6584 --- /dev/null +++ b/apollo-router/src/plugins/authentication/connector.rs @@ -0,0 +1,50 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use tower::ServiceBuilder; +use tower::ServiceExt; + +use crate::plugins::authentication::subgraph::SigningParamsConfig; +use crate::services::connector_service::ConnectorInfo; +use crate::services::connector_service::ConnectorSourceRef; +use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; +use crate::services::http::HttpRequest; + +pub(super) struct ConnectorAuth { + pub(super) signing_params: Arc>>, +} + +impl ConnectorAuth { + pub(super) fn http_client_service( + &self, + subgraph_name: &str, + service: crate::services::http::BoxService, + ) -> crate::services::http::BoxService { + let signing_params = self.signing_params.clone(); + let subgraph_name = subgraph_name.to_string(); + ServiceBuilder::new() + .map_request(move |req: HttpRequest| { + if let Ok(Some(connector_info)) = req + .context + .get::<&str, ConnectorInfo>(CONNECTOR_INFO_CONTEXT_KEY) + { + if let Some(source_name) = connector_info.source_name { + if let Some(signing_params) = signing_params + .get(&ConnectorSourceRef::new( + subgraph_name.clone(), + source_name.clone(), + )) + .cloned() + { + req.context + .extensions() + .with_lock(|mut lock| lock.insert(signing_params)); + } + } + } + req + }) + 
.service(service) + .boxed() + } +} diff --git a/apollo-router/src/plugins/authentication/mod.rs b/apollo-router/src/plugins/authentication/mod.rs index 0239e1f005..18e906c1ba 100644 --- a/apollo-router/src/plugins/authentication/mod.rs +++ b/apollo-router/src/plugins/authentication/mod.rs @@ -42,19 +42,24 @@ use self::jwks::JwksManager; use self::subgraph::SigningParams; use self::subgraph::SigningParamsConfig; use self::subgraph::SubgraphAuth; +use crate::configuration::connector::ConnectorConfiguration; use crate::graphql; use crate::layers::ServiceBuilderExt; use crate::plugin::serde::deserialize_header_name; use crate::plugin::serde::deserialize_header_value; -use crate::plugin::Plugin; use crate::plugin::PluginInit; +use crate::plugin::PluginPrivate; +use crate::plugins::authentication::connector::ConnectorAuth; use crate::plugins::authentication::jwks::JwkSetInfo; use crate::plugins::authentication::jwks::JwksConfig; -use crate::register_plugin; +use crate::plugins::authentication::subgraph::make_signing_params; +use crate::plugins::authentication::subgraph::AuthConfig; +use crate::services::connector_service::ConnectorSourceRef; use crate::services::router; use crate::services::APPLICATION_JSON_HEADER_VALUE; use crate::Context; +mod connector; mod jwks; pub(crate) mod subgraph; @@ -123,6 +128,7 @@ struct Router { struct AuthenticationPlugin { router: Option, subgraph: Option, + connector: Option, } #[derive(Clone, Debug, Deserialize, JsonSchema, serde_derive_default::Default)] @@ -207,6 +213,8 @@ struct Conf { router: Option, /// Subgraph configuration subgraph: Option, + /// Connector configuration + connector: Option>, } // We may support additional authentication mechanisms in future, so all @@ -409,7 +417,7 @@ fn search_jwks( } #[async_trait::async_trait] -impl Plugin for AuthenticationPlugin { +impl PluginPrivate for AuthenticationPlugin { type Config = Conf; async fn new(init: PluginInit) -> Result { @@ -491,7 +499,30 @@ impl Plugin for 
AuthenticationPlugin { None }; - Ok(Self { router, subgraph }) + let connector = if let Some(config) = init.config.connector { + let mut signing_params: HashMap> = + Default::default(); + for (s, source_config) in config.sources { + let source_ref: ConnectorSourceRef = s.parse()?; + signing_params.insert( + source_ref.clone(), + make_signing_params(&source_config, &source_ref.subgraph_name) + .await + .map(Arc::new)?, + ); + } + Some(ConnectorAuth { + signing_params: Arc::new(signing_params), + }) + } else { + None + }; + + Ok(Self { + router, + subgraph, + connector, + }) } fn router_service(&self, service: router::BoxService) -> router::BoxService { @@ -532,6 +563,18 @@ impl Plugin for AuthenticationPlugin { service } } + + fn http_client_service( + &self, + subgraph_name: &str, + service: crate::services::http::BoxService, + ) -> crate::services::http::BoxService { + if let Some(auth) = &self.connector { + auth.http_client_service(subgraph_name, service) + } else { + service + } + } } fn authenticate( @@ -937,4 +980,4 @@ pub(crate) fn convert_algorithm(algorithm: Algorithm) -> KeyAlgorithm { // // In order to keep the plugin names consistent, // we use using the `Reverse domain name notation` -register_plugin!("apollo", "authentication", AuthenticationPlugin); +register_private_plugin!("apollo", "authentication", AuthenticationPlugin); diff --git a/apollo-router/src/plugins/authentication/subgraph.rs b/apollo-router/src/plugins/authentication/subgraph.rs index 4a0bcd4d65..8e0d2ca1e9 100644 --- a/apollo-router/src/plugins/authentication/subgraph.rs +++ b/apollo-router/src/plugins/authentication/subgraph.rs @@ -19,19 +19,20 @@ use http::HeaderMap; use http::Request; use schemars::JsonSchema; use serde::Deserialize; +use serde::Serialize; use tokio::sync::mpsc::Sender; use tokio::task::JoinHandle; use tower::BoxError; use tower::ServiceBuilder; use tower::ServiceExt; -use crate::services::router::body::get_body_bytes; +use crate::services::router; use 
crate::services::router::body::RouterBody; use crate::services::SubgraphRequest; /// Hardcoded Config using access_key and secret. /// Prefer using DefaultChain instead. -#[derive(Clone, JsonSchema, Deserialize, Debug)] +#[derive(Clone, JsonSchema, Deserialize, Serialize, Debug)] #[serde(rename_all = "snake_case", deny_unknown_fields)] pub(crate) struct AWSSigV4HardcodedConfig { /// The ID for this access key. @@ -64,7 +65,7 @@ impl ProvideCredentials for AWSSigV4HardcodedConfig { } /// Configuration of the DefaultChainProvider -#[derive(Clone, JsonSchema, Deserialize, Debug)] +#[derive(Clone, JsonSchema, Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub(crate) struct DefaultChainConfig { /// The AWS region this chain applies to. @@ -78,7 +79,7 @@ pub(crate) struct DefaultChainConfig { } /// Specify assumed role configuration. -#[derive(Clone, JsonSchema, Deserialize, Debug)] +#[derive(Clone, JsonSchema, Deserialize, Serialize, Debug)] #[serde(deny_unknown_fields)] pub(crate) struct AssumeRoleProvider { /// Amazon Resource Name (ARN) @@ -91,7 +92,7 @@ pub(crate) struct AssumeRoleProvider { } /// Configure AWS sigv4 auth. 
-#[derive(Clone, JsonSchema, Deserialize, Debug)] +#[derive(Clone, JsonSchema, Deserialize, Serialize, Debug)] #[serde(rename_all = "snake_case")] pub(crate) enum AWSSigV4Config { Hardcoded(AWSSigV4HardcodedConfig), @@ -170,7 +171,7 @@ impl AWSSigV4Config { } } -#[derive(Clone, Debug, JsonSchema, Deserialize)] +#[derive(Clone, Debug, JsonSchema, Deserialize, Serialize)] #[serde(deny_unknown_fields)] pub(crate) enum AuthConfig { #[serde(rename = "aws_sig_v4")] @@ -314,7 +315,7 @@ impl SigningParamsConfig { // We'll go with default signed headers let headers = HeaderMap::<&'static str>::default(); // UnsignedPayload only applies to lattice - let body_bytes = get_body_bytes(body).await?.to_vec(); + let body_bytes = router::body::into_bytes(body).await?.to_vec(); let signable_request = SignableRequest::new( parts.method.as_str(), parts.uri.to_string(), @@ -335,8 +336,8 @@ impl SigningParamsConfig { error })? .into_parts(); - req = Request::::from_parts(parts, body_bytes.into()); - signing_instructions.apply_to_request_http0x(&mut req); + req = Request::::from_parts(parts, router::body::from_bytes(body_bytes)); + signing_instructions.apply_to_request_http1x(&mut req); increment_success_counter(subgraph_name); Ok(req) } @@ -375,7 +376,7 @@ impl SigningParamsConfig { })? 
.into_parts(); req = Request::<()>::from_parts(parts, ()); - signing_instructions.apply_to_request_http0x(&mut req); + signing_instructions.apply_to_request_http1x(&mut req); increment_success_counter(subgraph_name); Ok(req) } @@ -857,7 +858,7 @@ mod test { let http_request = request .clone() .subgraph_request - .map(|body| RouterBody::from(serde_json::to_string(&body).unwrap())); + .map(|body| router::body::from_bytes(serde_json::to_string(&body).unwrap())); std::thread::spawn(move || { let rt = tokio::runtime::Runtime::new().unwrap(); diff --git a/apollo-router/src/plugins/authentication/tests.rs b/apollo-router/src/plugins/authentication/tests.rs index 258c324da7..6a91d56785 100644 --- a/apollo-router/src/plugins/authentication/tests.rs +++ b/apollo-router/src/plugins/authentication/tests.rs @@ -1,18 +1,14 @@ use std::collections::HashMap; use std::collections::HashSet; -use std::io; use std::path::Path; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; use std::sync::Arc; +use axum::handler::HandlerWithoutStateExt; use base64::prelude::BASE64_URL_SAFE_NO_PAD; use base64::Engine as _; use http::header::CONTENT_TYPE; -use hyper::server::conn::AddrIncoming; -use hyper::service::make_service_fn; -use hyper::service::service_fn; -use hyper::Server; use insta::assert_yaml_snapshot; use jsonwebtoken::encode; use jsonwebtoken::get_current_timestamp; @@ -34,7 +30,8 @@ use super::*; use crate::assert_snapshot_subscriber; use crate::plugin::test; use crate::plugins::authentication::jwks::parse_jwks; -use crate::services::router::body::get_body_bytes; +use crate::services::router; +use crate::services::router::body::RouterBody; use crate::services::supergraph; fn create_an_url(filename: &str) -> String { @@ -1028,9 +1025,12 @@ async fn issuer_check() { match authenticate(&config, &manager, request.try_into().unwrap()) { ControlFlow::Break(res) => { - let response: graphql::Response = - 
serde_json::from_slice(&get_body_bytes(res.response.into_body()).await.unwrap()) - .unwrap(); + let response: graphql::Response = serde_json::from_slice( + &router::body::into_bytes(res.response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(response, graphql::Response::builder() .errors(vec![graphql::Error::builder().extension_code("AUTH_ERROR").message("Invalid issuer: the token's `iss` was 'hallo', but signed with a key from 'hello'").build()]).build()); } @@ -1064,9 +1064,12 @@ async fn issuer_check() { match authenticate(&config, &manager, request.try_into().unwrap()) { ControlFlow::Break(res) => { - let response: graphql::Response = - serde_json::from_slice(&get_body_bytes(res.response.into_body()).await.unwrap()) - .unwrap(); + let response: graphql::Response = serde_json::from_slice( + &router::body::into_bytes(res.response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(response, graphql::Response::builder() .errors(vec![graphql::Error::builder().extension_code("AUTH_ERROR").message("Invalid issuer: the token's `iss` was 'AAAA', but signed with a key from 'hello'").build()]).build()); } @@ -1095,9 +1098,12 @@ async fn issuer_check() { match authenticate(&config, &manager, request.try_into().unwrap()) { ControlFlow::Break(res) => { - let response: graphql::Response = - serde_json::from_slice(&get_body_bytes(res.response.into_body()).await.unwrap()) - .unwrap(); + let response: graphql::Response = serde_json::from_slice( + &router::body::into_bytes(res.response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(response, graphql::Response::builder() .errors(vec![graphql::Error::builder().extension_code("AUTH_ERROR").message("Invalid issuer: the token's `iss` was 'AAAA', but signed with a key from 'hello'").build()]).build()); } @@ -1292,38 +1298,23 @@ async fn jwks_send_headers() { let got_header = Arc::new(AtomicBool::new(false)); let gh = got_header.clone(); - let service = make_service_fn(move |_| { - let gh = 
gh.clone(); + let service = move |headers: HeaderMap| { + println!("got re: {:?}", headers); + let gh: Arc = gh.clone(); async move { - //let gh1 = gh.clone(); - Ok::<_, io::Error>(service_fn(move |req| { - println!("got re: {:?}", req.headers()); - let gh: Arc = gh.clone(); - async move { - if req - .headers() - .get("jwks-authz") - .and_then(|v| v.to_str().ok()) - == Some("user1") - { - gh.store(true, Ordering::Release); - } - Ok::<_, io::Error>( - http::Response::builder() - .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .status(StatusCode::OK) - .version(http::Version::HTTP_11) - .body::( - include_str!("testdata/jwks.json").into(), - ) - .unwrap(), - ) - } - })) + if headers.get("jwks-authz").and_then(|v| v.to_str().ok()) == Some("user1") { + gh.store(true, Ordering::Release); + } + http::Response::builder() + .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) + .status(StatusCode::OK) + .version(http::Version::HTTP_11) + .body::(router::body::from_bytes(include_str!("testdata/jwks.json"))) + .unwrap() } - }); - let server = Server::builder(AddrIncoming::from_listener(listener).unwrap()).serve(service); - tokio::task::spawn(server); + }; + let server = axum::serve(listener, service.into_make_service()); + tokio::task::spawn(async { server.await.unwrap() }); let url = Url::parse(&format!("http://{socket_addr}/")).unwrap(); diff --git a/apollo-router/src/plugins/authorization/tests.rs b/apollo-router/src/plugins/authorization/tests.rs index b8b1b8052b..be9cb51578 100644 --- a/apollo-router/src/plugins/authorization/tests.rs +++ b/apollo-router/src/plugins/authorization/tests.rs @@ -9,6 +9,7 @@ use crate::plugin::test::MockSubgraph; use crate::plugin::test::MockSubgraphService; use crate::plugins::authorization::CacheKeyMetadata; use crate::services::router; +use crate::services::router::body; use crate::services::subgraph; use crate::services::supergraph; use crate::Context; @@ -282,7 +283,7 @@ async fn authenticated_directive() { .method("POST") 
.header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -313,7 +314,7 @@ async fn authenticated_directive() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -400,7 +401,7 @@ async fn authenticated_directive_reject_unauthorized() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -485,7 +486,7 @@ async fn authenticated_directive_dry_run() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -626,7 +627,7 @@ async fn scopes_directive() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -657,7 +658,7 @@ async fn scopes_directive() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -688,7 +689,7 @@ async fn scopes_directive() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -719,7 +720,7 @@ async fn scopes_directive() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, 
"application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -798,7 +799,7 @@ async fn scopes_directive_reject_unauthorized() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -878,7 +879,7 @@ async fn scopes_directive_dry_run() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; @@ -960,7 +961,7 @@ async fn errors_in_extensions() { .method("POST") .header(CONTENT_TYPE, "application/json") .header(ACCEPT, "application/json") - .body(serde_json::to_vec(&req).unwrap().into()) + .body(body::from_bytes(serde_json::to_vec(&req).unwrap())) .unwrap(), }; diff --git a/apollo-router/src/plugins/cache/invalidation_endpoint.rs b/apollo-router/src/plugins/cache/invalidation_endpoint.rs index 19d4b5a582..9f86636a9a 100644 --- a/apollo-router/src/plugins/cache/invalidation_endpoint.rs +++ b/apollo-router/src/plugins/cache/invalidation_endpoint.rs @@ -24,7 +24,6 @@ use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_ERROR; use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_OK; use crate::services::router; -use crate::services::router::body::RouterBody; use crate::ListenAddr; pub(crate) const INVALIDATION_ENDPOINT_SPAN_NAME: &str = "invalidation_endpoint"; @@ -112,16 +111,15 @@ impl Service for InvalidationService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::UNAUTHORIZED) - .body("Missing authorization header".into()) + .body(router::body::from_bytes("Missing authorization header")) .map_err(BoxError::from)?, context: req.context, }); } match 
parts.method { Method::POST => { - let body = Into::::into(body) - .to_bytes() - .instrument(tracing::info_span!("to_bytes")) + let body = router::body::into_bytes(body) + .instrument(tracing::info_span!("into_bytes")) .await .map_err(|e| format!("failed to get the request body: {e}")) .and_then(|bytes| { @@ -161,7 +159,9 @@ impl Service for InvalidationService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::UNAUTHORIZED) - .body("Invalid authorization header".into()) + .body(router::body::from_bytes( + "Invalid authorization header", + )) .map_err(BoxError::from)?, context: req.context, }); @@ -174,12 +174,11 @@ impl Service for InvalidationService { Ok(count) => Ok(router::Response { response: http::Response::builder() .status(StatusCode::ACCEPTED) - .body( - serde_json::to_string(&json!({ + .body(router::body::from_bytes(serde_json::to_string( + &json!({ "count": count - }))? - .into(), - ) + }), + )?)) .map_err(BoxError::from)?, context: req.context, }), @@ -189,7 +188,7 @@ impl Service for InvalidationService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::BAD_REQUEST) - .body(err.to_string().into()) + .body(router::body::from_bytes(err.to_string())) .map_err(BoxError::from)?, context: req.context, }) @@ -201,7 +200,7 @@ impl Service for InvalidationService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::BAD_REQUEST) - .body(err.into()) + .body(router::body::from_bytes(err)) .map_err(BoxError::from)?, context: req.context, }) @@ -213,7 +212,7 @@ impl Service for InvalidationService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::METHOD_NOT_ALLOWED) - .body("".into()) + .body(router::body::from_bytes("".to_string())) .map_err(BoxError::from)?, context: req.context, }) diff --git a/apollo-router/src/plugins/connectors/configuration.rs b/apollo-router/src/plugins/connectors/configuration.rs new file mode 100644 index 
0000000000..e112cde016 --- /dev/null +++ b/apollo-router/src/plugins/connectors/configuration.rs @@ -0,0 +1,98 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use apollo_federation::sources::connect::expand::Connectors; +use apollo_federation::sources::connect::CustomConfiguration; +use schemars::JsonSchema; +use serde::Deserialize; +use serde::Serialize; +use url::Url; + +use crate::plugins::connectors::plugin::PLUGIN_NAME; +use crate::Configuration; + +#[derive(Debug, Clone, Deserialize, Serialize, JsonSchema)] +#[serde(deny_unknown_fields)] +pub(crate) struct ConnectorsConfig { + /// A map of subgraph name to connectors config for that subgraph + #[serde(default)] + pub(crate) subgraphs: HashMap, + + /// Enables connector debugging information on response extensions if the feature is enabled + #[serde(default)] + pub(crate) debug_extensions: bool, + + /// The maximum number of requests for a connector source + #[serde(default)] + pub(crate) max_requests_per_operation_per_source: Option, + + /// When enabled, adds an entry to the context for use in coprocessors + /// ```json + /// { + /// "context": { + /// "entries": { + /// "apollo_connectors::sources_in_query_plan": [ + /// { "subgraph_name": "subgraph", "source_name": "source" } + /// ] + /// } + /// } + /// } + /// ``` + #[serde(default)] + pub(crate) expose_sources_in_context: bool, +} + +/// Configuration for a connector subgraph +#[derive(Clone, Debug, Default, Deserialize, JsonSchema, Serialize)] +#[serde(deny_unknown_fields, default)] +pub(crate) struct SubgraphConnectorConfiguration { + /// A map of `@source(name:)` to configuration for that source + pub(crate) sources: HashMap, + + /// Other values that can be used by connectors via `{$config.}` + #[serde(rename = "$config")] + pub(crate) custom: CustomConfiguration, +} + +/// Configuration for a `@source` directive +#[derive(Clone, Debug, Default, Deserialize, JsonSchema, Serialize)] +#[serde(deny_unknown_fields, default)] +pub(crate) 
struct SourceConfiguration { + /// Override the `@source(http: {baseURL:})` + pub(crate) override_url: Option, + + /// The maximum number of requests for this source + pub(crate) max_requests_per_operation: Option, +} + +/// Modifies connectors with values from the configuration +pub(crate) fn apply_config(config: &Configuration, mut connectors: Connectors) -> Connectors { + let Some(config) = config.apollo_plugins.plugins.get(PLUGIN_NAME) else { + return connectors; + }; + let Ok(config) = serde_json::from_value::(config.clone()) else { + return connectors; + }; + + for connector in Arc::make_mut(&mut connectors.by_service_name).values_mut() { + let Some(subgraph_config) = config.subgraphs.get(&connector.id.subgraph_name) else { + continue; + }; + if let Some(source_config) = connector + .id + .source_name + .as_ref() + .and_then(|source_name| subgraph_config.sources.get(source_name)) + { + if let Some(url) = source_config.override_url.as_ref() { + connector.transport.source_url = Some(url.clone()); + } + if let Some(max_requests) = source_config.max_requests_per_operation { + connector.max_requests = Some(max_requests); + } + } + + connector.config = Some(subgraph_config.custom.clone()); + } + connectors +} diff --git a/apollo-router/src/plugins/connectors/error.rs b/apollo-router/src/plugins/connectors/error.rs new file mode 100644 index 0000000000..65262d7088 --- /dev/null +++ b/apollo-router/src/plugins/connectors/error.rs @@ -0,0 +1,58 @@ +//! Connectors error types. + +use apollo_federation::sources::connect::Connector; +use tower::BoxError; + +use crate::graphql; +use crate::graphql::ErrorExtension; +use crate::json_ext::Path; + +/// Errors that apply to all connector types. These errors represent a problem invoking the +/// connector, as opposed to an error returned from the connector itself. 
+#[derive(Debug, thiserror::Error, displaydoc::Display)] +pub(crate) enum Error { + /// Request limit exceeded + RequestLimitExceeded, + + /// {0} + HTTPClientError(#[from] BoxError), +} + +impl Error { + /// Create a GraphQL error from this error. + #[must_use] + pub(crate) fn to_graphql_error( + &self, + connector: &Connector, + path: Option, + ) -> crate::error::Error { + use serde_json_bytes::*; + + let builder = graphql::Error::builder() + .message(self.to_string()) + .extension_code(self.extension_code()) + .extension("service", connector.id.subgraph_name.clone()) + .extension( + "connector", + Value::Object(Map::from_iter([( + "coordinate".into(), + Value::String(connector.id.coordinate().into()), + )])), + ); + if let Some(path) = path { + builder.path(path).build() + } else { + builder.build() + } + } +} + +impl ErrorExtension for Error { + fn extension_code(&self) -> String { + match self { + Self::RequestLimitExceeded => "REQUEST_LIMIT_EXCEEDED", + Self::HTTPClientError(_) => "HTTP_CLIENT_ERROR", + } + .to_string() + } +} diff --git a/apollo-router/src/plugins/connectors/form_encoding.rs b/apollo-router/src/plugins/connectors/form_encoding.rs new file mode 100644 index 0000000000..f7cf30a452 --- /dev/null +++ b/apollo-router/src/plugins/connectors/form_encoding.rs @@ -0,0 +1,136 @@ +use serde_json_bytes::Value; + +pub(super) fn encode_json_as_form(value: &Value) -> Result { + if value.as_object().is_none() { + return Err("Expected URL-encoded forms to be objects"); + } + + let mut encoded: form_urlencoded::Serializer = + form_urlencoded::Serializer::new(String::new()); + + fn encode(encoded: &mut form_urlencoded::Serializer, value: &Value, prefix: &str) { + match value { + Value::Null => { + encoded.append_pair(prefix, ""); + } + Value::String(s) => { + encoded.append_pair(prefix, s.as_str()); + } + Value::Bool(b) => { + encoded.append_pair(prefix, if *b { "true" } else { "false" }); + } + Value::Number(n) => { + encoded.append_pair(prefix, 
&n.to_string()); + } + Value::Array(array) => { + for (i, value) in array.iter().enumerate() { + let prefix = format!("{prefix}[{i}]"); + encode(encoded, value, &prefix); + } + } + Value::Object(obj) => { + for (key, value) in obj { + if prefix.is_empty() { + encode(encoded, value, key.as_str()) + } else { + let prefix = format!("{prefix}[{key}]", key = key.as_str()); + encode(encoded, value, &prefix); + }; + } + } + } + } + + encode(&mut encoded, value, ""); + + Ok(encoded.finish()) +} + +#[cfg(test)] +mod tests { + use serde_json_bytes::json; + + use super::*; + + #[test] + fn complex() { + let data = json!({ + "a": 1, + "b": "2", + "c": { + "d": 3, + "e": "4", + "f": { + "g": 5, + "h": "6", + "i": [7, 8, 9], + "j": [ + {"k": 10}, + {"l": 11}, + {"m": 12} + ] + } + } + }); + + let encoded = encode_json_as_form(&data).expect("test case is valid for transformation"); + assert_eq!(encoded, "a=1&b=2&c%5Bd%5D=3&c%5Be%5D=4&c%5Bf%5D%5Bg%5D=5&c%5Bf%5D%5Bh%5D=6&c%5Bf%5D%5Bi%5D%5B0%5D=7&c%5Bf%5D%5Bi%5D%5B1%5D=8&c%5Bf%5D%5Bi%5D%5B2%5D=9&c%5Bf%5D%5Bj%5D%5B0%5D%5Bk%5D=10&c%5Bf%5D%5Bj%5D%5B1%5D%5Bl%5D=11&c%5Bf%5D%5Bj%5D%5B2%5D%5Bm%5D=12"); + } + + // https://github.com/ljharb/qs/blob/main/test/stringify.js used as reference for these tests + #[rstest::rstest] + #[case(r#"{ "a": "b" }"#, "a=b")] + #[case(r#"{ "a": 1 }"#, "a=1")] + #[case(r#"{ "a": 1, "b": 2 }"#, "a=1&b=2")] + #[case(r#"{ "a": "A_Z" }"#, "a=A_Z")] + #[case(r#"{ "a": "€" }"#, "a=%E2%82%AC")] + #[case(r#"{ "a": "" }"#, "a=%EE%80%80")] + #[case(r#"{ "a": "א" }"#, "a=%D7%90")] + #[case(r#"{ "a": "𐐷" }"#, "a=%F0%90%90%B7")] + #[case(r#"{ "a": { "b": "c" } }"#, "a%5Bb%5D=c")] + #[case( + r#"{ "a": { "b": { "c": { "d": "e" } } } }"#, + "a%5Bb%5D%5Bc%5D%5Bd%5D=e" + )] + #[case(r#"{ "a": ["b", "c", "d"] }"#, "a%5B0%5D=b&a%5B1%5D=c&a%5B2%5D=d")] + #[case(r#"{ "a": [], "b": "zz" }"#, "b=zz")] + #[case( + r#"{ "a": { "b": ["c", "d"] } }"#, + "a%5Bb%5D%5B0%5D=c&a%5Bb%5D%5B1%5D=d" + )] + #[case( + r#"{ "a": [",", "", "c,d%"] 
}"#, + "a%5B0%5D=%2C&a%5B1%5D=&a%5B2%5D=c%2Cd%25" + )] + #[case(r#"{ "a": ",", "b": "", "c": "c,d%" }"#, "a=%2C&b=&c=c%2Cd%25")] + #[case(r#"{ "a": [{ "b": "c" }] }"#, "a%5B0%5D%5Bb%5D=c")] + #[case( + r#"{ "a": [{ "b": { "c": [1] } }] }"#, + "a%5B0%5D%5Bb%5D%5Bc%5D%5B0%5D=1" + )] + #[case( + r#"{ "a": [{ "b": 1 }, 2, 3] }"#, + "a%5B0%5D%5Bb%5D=1&a%5B1%5D=2&a%5B2%5D=3" + )] + #[case(r#"{ "a": "" }"#, "a=")] + #[case(r#"{ "a": null }"#, "a=")] + #[case(r#"{ "a": { "b": "" } }"#, "a%5Bb%5D=")] + #[case(r#"{ "a": { "b": null } }"#, "a%5Bb%5D=")] + #[case(r#"{ "a": "b c" }"#, "a=b+c")] // RFC 1738, not RFC 3986 with %20 for spaces! + #[case( + r#"{ "my weird field": "~q1!2\"'w$5&7/z8)?" }"#, + // "my%20weird%20field=~q1%212%22%27w%245%267%2Fz8%29%3F" + "my+weird+field=%7Eq1%212%22%27w%245%267%2Fz8%29%3F" + )] + #[case(r#"{ "a": true }"#, "a=true")] + #[case(r#"{ "a": { "b": true } }"#, "a%5Bb%5D=true")] + #[case(r#"{ "b": false }"#, "b=false")] + #[case(r#"{ "b": { "c": false } }"#, "b%5Bc%5D=false")] + // #[case(r#"{ "a": [, "2", , , "1"] }"#, "a%5B1%5D=2&a%5B4%5D=1")] // json doesn't do sparse arrays + + fn stringifies_a_querystring_object(#[case] json: &str, #[case] expected: &str) { + let json = serde_json::from_slice::(json.as_bytes()).unwrap(); + let encoded = encode_json_as_form(&json).expect("test cases are valid for transformation"); + assert_eq!(encoded, expected); + } +} diff --git a/apollo-router/src/plugins/connectors/handle_responses.rs b/apollo-router/src/plugins/connectors/handle_responses.rs new file mode 100644 index 0000000000..9d41360d7e --- /dev/null +++ b/apollo-router/src/plugins/connectors/handle_responses.rs @@ -0,0 +1,1094 @@ +use std::sync::Arc; + +use apollo_federation::sources::connect::Connector; +use http_body::Body as HttpBody; +use parking_lot::Mutex; +use serde_json_bytes::ByteString; +use serde_json_bytes::Value; +use tracing::Span; + +use crate::graphql; +use crate::json_ext::Path; +use crate::plugins::connectors::http::Response as 
ConnectorResponse; +use crate::plugins::connectors::http::Result as ConnectorResult; +use crate::plugins::connectors::make_requests::ResponseKey; +use crate::plugins::connectors::plugin::debug::ConnectorContext; +use crate::plugins::connectors::plugin::debug::ConnectorDebugHttpRequest; +use crate::plugins::connectors::plugin::debug::SelectionData; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_ERROR; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE_OK; +use crate::services::connect::Response; +use crate::services::fetch::AddSubgraphNameExt; +use crate::services::router; +use crate::Context; + +const ENTITIES: &str = "_entities"; +const TYPENAME: &str = "__typename"; + +// --- ERRORS ------------------------------------------------------------------ + +#[derive(Debug, thiserror::Error, displaydoc::Display)] +pub(crate) enum HandleResponseError { + /// Merge error: {0} + MergeError(String), +} + +// --- RAW RESPONSE ------------------------------------------------------------ + +enum RawResponse { + /// This error type is used if: + /// 1. We didn't even make the request (we hit the request limit) + /// 2. We couldn't deserialize the response body + Error { + error: graphql::Error, + key: ResponseKey, + }, + /// Contains the response data directly from the HTTP response. We'll apply + /// a selection to convert this into either `data` or `errors` based on + /// whether it's successful or not. + Data { + parts: http::response::Parts, + data: Value, + key: ResponseKey, + debug_request: Option, + }, +} + +impl RawResponse { + /// Returns a `MappedResponse` with the response data transformed by the + /// selection mapping. + /// + /// As a side effect, this will also write to the debug context. 
+ fn map_response( + self, + connector: &Connector, + context: &Context, + debug_context: &Option>>, + ) -> MappedResponse { + match self { + RawResponse::Error { error, key } => MappedResponse::Error { error, key }, + RawResponse::Data { + data, + key, + parts, + debug_request, + } => { + let inputs = key.inputs().merge( + &connector.response_variables, + connector.config.as_ref(), + context, + Some(parts.status.as_u16()), + ); + + let (res, apply_to_errors) = key.selection().apply_with_vars(&data, &inputs); + + if let Some(ref debug) = debug_context { + debug.lock().push_response( + debug_request.clone(), + &parts, + &data, + Some(SelectionData { + source: connector.selection.to_string(), + transformed: key.selection().to_string(), + result: res.clone(), + errors: apply_to_errors, + }), + ); + } + + MappedResponse::Data { + key, + data: res.unwrap_or_else(|| Value::Null), + } + } + } + } + + /// Returns a `MappedResponse` with a GraphQL error. + /// + /// As a side effect, this will also write to the debug context. + // TODO: This is where we'd map the response to a top-level GraphQL error + // once we have an error mapping. For now, it just creates a basic top-level + // error with the status code. 
+ fn map_error( + self, + connector: &Connector, + _context: &Context, + debug_context: &Option>>, + ) -> MappedResponse { + use serde_json_bytes::*; + + match self { + RawResponse::Error { error, key } => MappedResponse::Error { error, key }, + RawResponse::Data { + key, + parts, + debug_request, + data, + } => { + let error = graphql::Error::builder() + .message("Request failed".to_string()) + .extension_code("CONNECTOR_FETCH") + .extension("service", connector.id.subgraph_name.clone()) + .extension( + "http", + Value::Object(Map::from_iter([( + "status".into(), + Value::Number(parts.status.as_u16().into()), + )])), + ) + .extension( + "connector", + Value::Object(Map::from_iter([( + "coordinate".into(), + Value::String(connector.id.coordinate().into()), + )])), + ) + .path::((&key).into()) + .build() + .add_subgraph_name(&connector.id.subgraph_name); // for include_subgraph_errors + + if let Some(ref debug) = debug_context { + debug + .lock() + .push_response(debug_request.clone(), &parts, &data, None); + } + + MappedResponse::Error { error, key } + } + } + } +} + +// --- MAPPED RESPONSE --------------------------------------------------------- + +pub(crate) enum MappedResponse { + /// This is equivalent to RawResponse::Error, but it also represents errors + /// when the request is semantically unsuccessful (e.g. 404, 500). + Error { + error: graphql::Error, + key: ResponseKey, + }, + /// The is the response data after applying the selection mapping. + Data { data: Value, key: ResponseKey }, +} + +impl MappedResponse { + /// Adds the response data to the `data` map or the error to the `errors` + /// array. How data is added depends on the `ResponseKey`: it's either a + /// property directly on the map, or stored in the `_entities` array. + fn add_to_data( + self, + data: &mut serde_json_bytes::Map, + errors: &mut Vec, + count: usize, + ) -> Result<(), HandleResponseError> { + match self { + Self::Error { error, key, .. 
} => { + match key { + // add a null to the "_entities" array at the right index + ResponseKey::Entity { index, .. } | ResponseKey::EntityField { index, .. } => { + let entities = data + .entry(ENTITIES) + .or_insert(Value::Array(Vec::with_capacity(count))); + entities + .as_array_mut() + .ok_or_else(|| { + HandleResponseError::MergeError("_entities is not an array".into()) + })? + .insert(index, Value::Null); + } + _ => {} + }; + errors.push(error); + } + Self::Data { + data: value, key, .. + } => match key { + ResponseKey::RootField { ref name, .. } => { + data.insert(name.clone(), value); + } + ResponseKey::Entity { index, .. } => { + let entities = data + .entry(ENTITIES) + .or_insert(Value::Array(Vec::with_capacity(count))); + entities + .as_array_mut() + .ok_or_else(|| { + HandleResponseError::MergeError("_entities is not an array".into()) + })? + .insert(index, value); + } + ResponseKey::EntityField { + index, + ref field_name, + ref typename, + .. + } => { + let entities = data + .entry(ENTITIES) + .or_insert(Value::Array(Vec::with_capacity(count))) + .as_array_mut() + .ok_or_else(|| { + HandleResponseError::MergeError("_entities is not an array".into()) + })?; + + match entities.get_mut(index) { + Some(Value::Object(entity)) => { + entity.insert(field_name.clone(), value); + } + _ => { + let mut entity = serde_json_bytes::Map::new(); + if let Some(typename) = typename { + entity.insert(TYPENAME, Value::String(typename.as_str().into())); + } + entity.insert(field_name.clone(), value); + entities.insert(index, Value::Object(entity)); + } + }; + } + }, + } + + Ok(()) + } +} + +// --- handle_responses -------------------------------------------------------- + +pub(crate) async fn process_response( + response: ConnectorResponse, + connector: &Connector, + context: &Context, + debug_context: &Option>>, +) -> MappedResponse { + let response_key = response.key; + let debug_request = response.debug_request; + + let raw = match response.result { + // This occurs 
when we short-circuit the request when over the limit + ConnectorResult::Err(error) => RawResponse::Error { + error: error.to_graphql_error(connector, Some((&response_key).into())), + key: response_key, + }, + ConnectorResult::HttpResponse(response) => { + let (parts, body) = response.into_parts(); + + // If this errors, it will write to the debug context because it + // has access to the raw bytes, so we can't write to it again + // in any RawResponse::Error branches. + match deserialize_response( + body, + &parts, + connector, + (&response_key).into(), + debug_context, + &debug_request, + ) + .await + { + Ok(data) => RawResponse::Data { + parts, + data, + key: response_key, + debug_request, + }, + Err(error) => RawResponse::Error { + error, + key: response_key, + }, + } + } + }; + + let is_success = match &raw { + RawResponse::Error { .. } => false, + RawResponse::Data { parts, .. } => parts.status.is_success(), + }; + + if is_success { + raw.map_response(connector, context, debug_context) + } else { + raw.map_error(connector, context, debug_context) + } +} + +pub(crate) fn aggregate_responses( + responses: Vec, +) -> Result { + let mut data = serde_json_bytes::Map::new(); + let mut errors = Vec::new(); + let count = responses.len(); + + for mapped in responses { + mapped.add_to_data(&mut data, &mut errors, count)?; + } + + let data = if data.is_empty() { + Value::Null + } else { + Value::Object(data) + }; + + Span::current().record( + OTEL_STATUS_CODE, + if errors.is_empty() { + OTEL_STATUS_CODE_OK + } else { + OTEL_STATUS_CODE_ERROR + }, + ); + + Ok(Response { + response: http::Response::builder() + .body( + graphql::Response::builder() + .data(data) + .errors(errors) + .build(), + ) + .unwrap(), + }) +} + +/// Converts the response body to bytes and deserializes it into a json Value. +/// This is the last time we have access to the original bytes, so it's the only +/// opportunity to write the invalid response to the debug context. 
+async fn deserialize_response( + body: T, + parts: &http::response::Parts, + connector: &Connector, + path: Path, + debug_context: &Option>>, + debug_request: &Option, +) -> Result { + use serde_json_bytes::*; + + let make_err = |path: Path| { + graphql::Error::builder() + .message("Request failed".to_string()) + .extension_code("CONNECTOR_FETCH") + .extension("service", connector.id.subgraph_name.clone()) + .extension( + "http", + Value::Object(Map::from_iter([( + "status".into(), + Value::Number(parts.status.as_u16().into()), + )])), + ) + .extension( + "connector", + Value::Object(Map::from_iter([( + "coordinate".into(), + Value::String(connector.id.coordinate().into()), + )])), + ) + .path(path) + .build() + .add_subgraph_name(&connector.id.subgraph_name) // for include_subgraph_errors + }; + + let body = &router::body::into_bytes(body) + .await + .map_err(|_| make_err(path.clone()))?; + match serde_json::from_slice::(body) { + Ok(json_data) => Ok(json_data), + Err(_) => { + if let Some(ref debug_context) = debug_context { + debug_context + .lock() + .push_invalid_response(debug_request.clone(), parts, body); + } + + Err(make_err(path)) + } + } +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use apollo_compiler::name; + use apollo_federation::sources::connect::ConnectId; + use apollo_federation::sources::connect::ConnectSpec; + use apollo_federation::sources::connect::Connector; + use apollo_federation::sources::connect::EntityResolver; + use apollo_federation::sources::connect::HTTPMethod; + use apollo_federation::sources::connect::HttpJsonTransport; + use apollo_federation::sources::connect::JSONSelection; + use insta::assert_debug_snapshot; + use url::Url; + + use crate::plugins::connectors::handle_responses::process_response; + use crate::plugins::connectors::http::Response as ConnectorResponse; + use crate::plugins::connectors::make_requests::ResponseKey; + use crate::services::router; + use crate::services::router::body::RouterBody; + use 
crate::Context; + + #[tokio::test] + async fn test_handle_responses_root_fields() { + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(hello), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let response1: http::Response = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":"world"}"#)) + .unwrap(); + let response_key1 = ResponseKey::RootField { + name: "hello".to_string(), + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response2 = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":"world"}"#)) + .unwrap(); + let response_key2 = ResponseKey::RootField { + name: "hello2".to_string(), + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let res = super::aggregate_responses(vec![ + process_response( + ConnectorResponse { + result: response1.into(), + key: response_key1, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + process_response( + ConnectorResponse { + result: response2.into(), + key: response_key2, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + ]) + .unwrap(); + + assert_debug_snapshot!(res, @r###" + Response { + response: Response { + status: 200, + version: HTTP/1.1, + headers: {}, + body: Response { + label: None, + data: Some( + Object({ + "hello": String( + "world", + ), + "hello2": String( + "world", + ), + }), + ), 
+ path: None, + errors: [], + extensions: {}, + has_next: None, + subscribed: None, + created_at: None, + incremental: [], + }, + }, + } + "###); + } + + #[tokio::test] + async fn test_handle_responses_entities() { + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(user), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data { id }").unwrap(), + entity_resolver: Some(EntityResolver::Explicit), + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let response1: http::Response = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":{"id": "1"}}"#)) + .unwrap(); + let response_key1 = ResponseKey::Entity { + index: 0, + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response2 = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":{"id": "2"}}"#)) + .unwrap(); + let response_key2 = ResponseKey::Entity { + index: 1, + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let res = super::aggregate_responses(vec![ + process_response( + ConnectorResponse { + result: response1.into(), + key: response_key1, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + process_response( + ConnectorResponse { + result: response2.into(), + key: response_key2, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + ]) + .unwrap(); + + assert_debug_snapshot!(res, @r###" + Response { + response: Response { + status: 200, + version: HTTP/1.1, + headers: {}, + body: 
Response { + label: None, + data: Some( + Object({ + "_entities": Array([ + Object({ + "id": String( + "1", + ), + }), + Object({ + "id": String( + "2", + ), + }), + ]), + }), + ), + path: None, + errors: [], + extensions: {}, + has_next: None, + subscribed: None, + created_at: None, + incremental: [], + }, + }, + } + "###); + } + + #[tokio::test] + async fn test_handle_responses_entity_field() { + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(User), + name!(field), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: Some(EntityResolver::Implicit), + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let response1: http::Response = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":"value1"}"#)) + .unwrap(); + let response_key1 = ResponseKey::EntityField { + index: 0, + inputs: Default::default(), + field_name: "field".to_string(), + typename: Some(name!("User")), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response2 = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":"value2"}"#)) + .unwrap(); + let response_key2 = ResponseKey::EntityField { + index: 1, + inputs: Default::default(), + field_name: "field".to_string(), + typename: Some(name!("User")), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let res = super::aggregate_responses(vec![ + process_response( + ConnectorResponse { + result: response1.into(), + key: response_key1, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + 
process_response( + ConnectorResponse { + result: response2.into(), + key: response_key2, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + ]) + .unwrap(); + + assert_debug_snapshot!(res, @r###" + Response { + response: Response { + status: 200, + version: HTTP/1.1, + headers: {}, + body: Response { + label: None, + data: Some( + Object({ + "_entities": Array([ + Object({ + "__typename": String( + "User", + ), + "field": String( + "value1", + ), + }), + Object({ + "__typename": String( + "User", + ), + "field": String( + "value2", + ), + }), + ]), + }), + ), + path: None, + errors: [], + extensions: {}, + has_next: None, + subscribed: None, + created_at: None, + incremental: [], + }, + }, + } + "###); + } + + #[tokio::test] + async fn test_handle_responses_errors() { + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(user), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: Some(EntityResolver::Explicit), + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let response_plaintext: http::Response = http::Response::builder() + .body(router::body::from_bytes(r#"plain text"#)) + .unwrap(); + let response_key_plaintext = ResponseKey::Entity { + index: 0, + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response1: http::Response = http::Response::builder() + .status(404) + .body(router::body::from_bytes(r#"{"error":"not found"}"#)) + .unwrap(); + let response_key1 = ResponseKey::Entity { + index: 1, + inputs: 
Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response2 = http::Response::builder() + .body(router::body::from_bytes(r#"{"data":{"id":"2"}}"#)) + .unwrap(); + let response_key2 = ResponseKey::Entity { + index: 2, + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let response3: http::Response = http::Response::builder() + .status(500) + .body(router::body::from_bytes(r#"{"error":"whoops"}"#)) + .unwrap(); + let response_key3 = ResponseKey::Entity { + index: 3, + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$.data").unwrap()), + }; + + let res = super::aggregate_responses(vec![ + process_response( + ConnectorResponse { + result: response_plaintext.into(), + key: response_key_plaintext, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + process_response( + ConnectorResponse { + result: response1.into(), + key: response_key1, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + process_response( + ConnectorResponse { + result: response2.into(), + key: response_key2, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + process_response( + ConnectorResponse { + result: response3.into(), + key: response_key3, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + ]) + .unwrap(); + + assert_debug_snapshot!(res, @r###" + Response { + response: Response { + status: 200, + version: HTTP/1.1, + headers: {}, + body: Response { + label: None, + data: Some( + Object({ + "_entities": Array([ + Null, + Null, + Object({ + "id": String( + "2", + ), + }), + Null, + ]), + }), + ), + path: None, + errors: [ + Error { + message: "Request failed", + locations: [], + path: Some( + Path( + [ + Key( + "_entities", + None, + ), + Index( + 0, + ), + ], + ), + ), + extensions: { + "service": String( + "subgraph_name", + 
), + "http": Object({ + "status": Number(200), + }), + "connector": Object({ + "coordinate": String( + "subgraph_name:Query.user@connect[0]", + ), + }), + "code": String( + "CONNECTOR_FETCH", + ), + "fetch_subgraph_name": String( + "subgraph_name", + ), + }, + }, + Error { + message: "Request failed", + locations: [], + path: Some( + Path( + [ + Key( + "_entities", + None, + ), + Index( + 1, + ), + ], + ), + ), + extensions: { + "service": String( + "subgraph_name", + ), + "http": Object({ + "status": Number(404), + }), + "connector": Object({ + "coordinate": String( + "subgraph_name:Query.user@connect[0]", + ), + }), + "code": String( + "CONNECTOR_FETCH", + ), + "fetch_subgraph_name": String( + "subgraph_name", + ), + }, + }, + Error { + message: "Request failed", + locations: [], + path: Some( + Path( + [ + Key( + "_entities", + None, + ), + Index( + 3, + ), + ], + ), + ), + extensions: { + "service": String( + "subgraph_name", + ), + "http": Object({ + "status": Number(500), + }), + "connector": Object({ + "coordinate": String( + "subgraph_name:Query.user@connect[0]", + ), + }), + "code": String( + "CONNECTOR_FETCH", + ), + "fetch_subgraph_name": String( + "subgraph_name", + ), + }, + }, + ], + extensions: {}, + has_next: None, + subscribed: None, + created_at: None, + incremental: [], + }, + }, + } + "###); + } + + #[tokio::test] + async fn test_handle_responses_status() { + let selection = JSONSelection::parse("$status").unwrap(); + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(hello), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: selection.clone(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: 
Default::default(), + response_variables: selection.external_variables().collect(), + }; + + let response1: http::Response = http::Response::builder() + .status(201) + .body(router::body::from_bytes(r#"{}"#)) + .unwrap(); + let response_key1 = ResponseKey::RootField { + name: "hello".to_string(), + inputs: Default::default(), + selection: Arc::new(JSONSelection::parse("$status").unwrap()), + }; + + let res = super::aggregate_responses(vec![ + process_response( + ConnectorResponse { + result: response1.into(), + key: response_key1, + debug_request: None, + }, + &connector, + &Context::default(), + &None, + ) + .await, + ]) + .unwrap(); + + assert_debug_snapshot!(res, @r###" + Response { + response: Response { + status: 200, + version: HTTP/1.1, + headers: {}, + body: Response { + label: None, + data: Some( + Object({ + "hello": Number(201), + }), + ), + path: None, + errors: [], + extensions: {}, + has_next: None, + subscribed: None, + created_at: None, + incremental: [], + }, + }, + } + "###); + } +} diff --git a/apollo-router/src/plugins/connectors/http.rs b/apollo-router/src/plugins/connectors/http.rs new file mode 100644 index 0000000000..772a92846b --- /dev/null +++ b/apollo-router/src/plugins/connectors/http.rs @@ -0,0 +1,43 @@ +//! HTTP-based connector implementation types. 

use http_body::Body as HttpBody;

use crate::plugins::connectors::error::Error as ConnectorError;
use crate::plugins::connectors::make_requests::ResponseKey;
use crate::plugins::connectors::plugin::debug::ConnectorDebugHttpRequest;
use crate::services::router::body::RouterBody;

/// A result of a connector
// NOTE(review): generic parameters were lost in extraction; `T: HttpBody`
// bounds reconstructed from the `http_body::Body as HttpBody` import and the
// call sites that pass `http::Response<RouterBody>` — confirm against upstream.
pub(crate) enum Result<T: HttpBody> {
    /// The connector was not invoked because of an error
    Err(ConnectorError),

    /// The connector was invoked and returned an HTTP response
    HttpResponse(http::Response<T>),
}

impl<T: HttpBody> From<http::Response<T>> for Result<T> {
    fn from(value: http::Response<T>) -> Self {
        Result::HttpResponse(value)
    }
}

impl<T: HttpBody> From<ConnectorError> for Result<T> {
    fn from(value: ConnectorError) -> Self {
        Result::Err(value)
    }
}

/// The result of a connector and the associated response key
// NOTE(review): default type parameter reconstructed — callers in tests build
// this struct with `http::Response<RouterBody>` bodies; verify upstream.
pub(crate) struct Response<T: HttpBody = RouterBody> {
    pub(crate) result: Result<T>,
    pub(crate) key: ResponseKey,
    pub(crate) debug_request: Option<ConnectorDebugHttpRequest>,
}

/// An outgoing connector HTTP request paired with the response key that tells
/// the response handler where the result belongs in the GraphQL response.
#[derive(Debug)]
pub(crate) struct Request {
    pub(crate) request: http::Request<RouterBody>,
    pub(crate) key: ResponseKey,
    pub(crate) debug_request: Option<ConnectorDebugHttpRequest>,
}
diff --git a/apollo-router/src/plugins/connectors/http_json_transport.rs b/apollo-router/src/plugins/connectors/http_json_transport.rs
new file mode 100644 index 0000000000..444aad0b3d --- /dev/null +++ b/apollo-router/src/plugins/connectors/http_json_transport.rs
use std::sync::Arc;

use apollo_compiler::collections::IndexMap;
use apollo_federation::sources::connect::HTTPMethod;
use apollo_federation::sources::connect::HeaderSource;
use apollo_federation::sources::connect::HttpJsonTransport;
use apollo_federation::sources::connect::URLTemplate;
use displaydoc::Display;
use http::header::CONTENT_LENGTH;
use http::header::CONTENT_TYPE;
use http::HeaderMap;
use http::HeaderName;
use http::HeaderValue;
use parking_lot::Mutex;
use serde_json_bytes::json;
use serde_json_bytes::Value;
use thiserror::Error;
use url::Url;

use super::form_encoding::encode_json_as_form;
use crate::plugins::connectors::plugin::debug::serialize_request;
use crate::plugins::connectors::plugin::debug::ConnectorContext;
use crate::plugins::connectors::plugin::debug::ConnectorDebugHttpRequest;
use crate::plugins::connectors::plugin::debug::SelectionData;
use crate::services::connect;
use crate::services::router;
use crate::services::router::body::RouterBody;

/// Build the outgoing HTTP request for a connector: interpolates the URI from
/// the template, applies configured headers, and serializes the body (JSON by
/// default, form-urlencoded when the content-type header says so). Also
/// produces the serialized debug representation when debugging is enabled.
pub(crate) fn make_request(
    transport: &HttpJsonTransport,
    inputs: IndexMap<String, Value>,
    original_request: &connect::Request,
    debug: &Option<Arc<Mutex<ConnectorContext>>>,
) -> Result<(http::Request<RouterBody>, Option<ConnectorDebugHttpRequest>), HttpJsonTransportError>
{
    let uri = make_uri(
        transport.source_url.as_ref(),
        &transport.connect_template,
        &inputs,
    )?;

    let request = http::Request::builder()
        .method(transport.method.as_str())
        .uri(uri.as_str());

    // add the headers and if content-type is specified, we'll check that when constructing the body
    let (mut request, content_type) = add_headers(
        request,
        original_request.supergraph_request.headers(),
        &transport.headers,
        &inputs,
    );

    let is_form_urlencoded = content_type.as_ref() == Some(&mime::APPLICATION_WWW_FORM_URLENCODED);

    // Apply the body selection (if any) to produce both the wire body and, for
    // debugging, the JSON value / form string it was derived from.
    let (json_body, form_body, body, content_length, apply_to_errors) =
        if let Some(ref selection) = transport.body {
            let (json_body, apply_to_errors) = selection.apply_with_vars(&json!({}), &inputs);
            let mut form_body = None;
            let (body, content_length) = if let Some(json_body) = json_body.as_ref() {
                if is_form_urlencoded {
                    let encoded = encode_json_as_form(json_body)
                        .map_err(HttpJsonTransportError::FormBodySerialization)?;
                    form_body = Some(encoded.clone());
                    let len = encoded.bytes().len();
                    (router::body::from_bytes(encoded), len)
                } else {
                    request = request.header(CONTENT_TYPE, mime::APPLICATION_JSON.essence_str());
                    let bytes = serde_json::to_vec(json_body)?;
                    let len = bytes.len();
                    (router::body::from_bytes(bytes), len)
                }
            } else {
                (router::body::empty(), 0)
            };
            (json_body, form_body, body, content_length, apply_to_errors)
        } else {
            (None, None, router::body::empty(), 0, vec![])
        };

    // Only methods that carry a body get an explicit content-length header.
    match transport.method {
        HTTPMethod::Post | HTTPMethod::Patch | HTTPMethod::Put => {
            request = request.header(CONTENT_LENGTH, content_length);
        }
        _ => {}
    }

    let request = request
        .body(body)
        .map_err(HttpJsonTransportError::InvalidNewRequest)?;

    // Debug serialization is only performed when a debug context is attached.
    let debug_request = debug.as_ref().map(|_| {
        if is_form_urlencoded {
            serialize_request(
                &request,
                "form-urlencoded".to_string(),
                form_body
                    .map(|s| serde_json_bytes::Value::String(s.clone().into()))
                    .as_ref(),
                transport.body.as_ref().map(|body| SelectionData {
                    source: body.to_string(),
                    transformed: body.to_string(), // no transformation so this is the same
                    result: json_body,
                    errors: apply_to_errors,
                }),
            )
        } else {
            serialize_request(
                &request,
                "json".to_string(),
                json_body.as_ref(),
                transport.body.as_ref().map(|body| SelectionData {
                    source: body.to_string(),
                    transformed: body.to_string(), // no transformation so this is the same
                    result: json_body.clone(),
                    errors: apply_to_errors,
                }),
            )
        }
    });

    Ok((request, debug_request))
}

/// Resolve the final URL: start from the source URL (or the template's base),
/// append the interpolated path segments, and merge interpolated query params
/// with any params already on the base URL.
fn make_uri(
    source_url: Option<&Url>,
    template: &URLTemplate,
    inputs: &IndexMap<String, Value>,
) -> Result<Url, HttpJsonTransportError> {
    let mut url = source_url
        .or(template.base.as_ref())
        .ok_or(HttpJsonTransportError::NoBaseUrl)?
        .clone();

    url.path_segments_mut()
        .map_err(|_| {
            HttpJsonTransportError::InvalidUrl(url::ParseError::RelativeUrlWithCannotBeABaseBase)
        })?
        // pop_if_empty avoids a double slash when the base URL has a trailing slash
        .pop_if_empty()
        .extend(
            template
                .interpolate_path(inputs)
                .map_err(|err| HttpJsonTransportError::TemplateGenerationError(err.message))?,
        );

    let query_params = template
        .interpolate_query(inputs)
        .map_err(|err| HttpJsonTransportError::TemplateGenerationError(err.message))?;
    if !query_params.is_empty() {
        url.query_pairs_mut().extend_pairs(query_params);
    }
    Ok(url)
}

/// Apply the configured headers: `From` propagates all values of a header from
/// the incoming supergraph request; `Value` interpolates a literal/templated
/// value. Returns the builder plus the parsed content-type, if one was set.
#[allow(clippy::mutable_key_type)] // HeaderName is internally mutable, but safe to use in maps
fn add_headers(
    mut request: http::request::Builder,
    incoming_supergraph_headers: &HeaderMap<HeaderValue>,
    config: &IndexMap<HeaderName, HeaderSource>,
    inputs: &IndexMap<String, Value>,
) -> (http::request::Builder, Option<mime::Mime>) {
    let mut content_type = None;

    for (header_name, header_source) in config {
        match header_source {
            HeaderSource::From(from) => {
                let values = incoming_supergraph_headers.get_all(from);
                let mut propagated = false;
                for value in values {
                    request = request.header(header_name.clone(), value.clone());
                    propagated = true;
                }
                if !propagated {
                    tracing::warn!("Header '{}' not found in incoming request", header_name);
                }
            }
            HeaderSource::Value(value) => match value.interpolate(inputs) {
                Ok(value) => {
                    request = request.header(header_name, value.clone());

                    if header_name == CONTENT_TYPE {
                        content_type = Some(value.clone());
                    }
                }
                Err(err) => {
                    tracing::error!("Unable to interpolate header value: {:?}", err);
                }
            },
        }
    }

    (
        request,
        // Invalid content-type values are silently dropped here (parse().ok()).
        content_type.and_then(|v| v.to_str().unwrap_or_default().parse().ok()),
    )
}

// NOTE(review): the payload type of `NewUriError` was lost in extraction;
// `Option<http::uri::InvalidUri>` reconstructed from the `{0:?}` format and
// the `#[from] Option<…>` shape — confirm against upstream.
#[derive(Error, Display, Debug)]
pub(crate) enum HttpJsonTransportError {
    /// Error building URI: {0:?}
    NewUriError(#[from] Option<http::uri::InvalidUri>),
    /// Could not generate HTTP request: {0}
    InvalidNewRequest(#[source] http::Error),
    /// Could not serialize body: {0}
    JsonBodySerialization(#[from] serde_json::Error),
    /// Could not serialize body: {0}
    FormBodySerialization(&'static str),
    /// Error building URI: {0:?}
    InvalidUrl(url::ParseError),
    /// Could not generate URI from inputs: {0}
    TemplateGenerationError(String),
    /// Either a source or a fully qualified URL must be provided to `@connect`
    NoBaseUrl,
}

#[cfg(test)]
mod test_make_uri {
    use insta::assert_snapshot;
    use pretty_assertions::assert_eq;
    use serde_json_bytes::json;

    use super::*;

    // Builds an inputs map with a single `$this` object from the given JSON fields.
    macro_rules! this {
        ($($value:tt)*) => {{
            let mut map = IndexMap::with_capacity_and_hasher(1, Default::default());
            map.insert("$this".to_string(), json!({ $($value)* }));
            map
        }};
    }

    #[test]
    fn append_path() {
        assert_eq!(
            make_uri(
                Some(&Url::parse("https://localhost:8080/v1").unwrap()),
                &"/hello/42".parse().unwrap(),
                &Default::default(),
            )
            .unwrap()
            .as_str(),
            "https://localhost:8080/v1/hello/42"
        );
    }

    #[test]
    fn append_path_with_trailing_slash() {
        assert_eq!(
            make_uri(
                Some(&Url::parse("https://localhost:8080/").unwrap()),
                &"/hello/42".parse().unwrap(),
                &Default::default(),
            )
            .unwrap()
            .as_str(),
            "https://localhost:8080/hello/42"
        );
    }

    #[test]
    fn append_path_test_with_trailing_slash_and_base_path() {
        assert_eq!(
            make_uri(
                Some(&Url::parse("https://localhost:8080/v1/").unwrap()),
                &"/hello/{$this.id}?id={$this.id}".parse().unwrap(),
                &this! { "id": 42 },
            )
            .unwrap()
            .as_str(),
            "https://localhost:8080/v1/hello/42?id=42"
        );
    }
    #[test]
    fn append_path_test_with_and_base_path_and_params() {
        assert_eq!(
            make_uri(
                Some(&Url::parse("https://localhost:8080/v1?foo=bar").unwrap()),
                &"/hello/{$this.id}?id={$this.id}".parse().unwrap(),
                &this! {"id": 42 },
            )
            .unwrap()
            .as_str(),
            "https://localhost:8080/v1/hello/42?foo=bar&id=42"
        );
    }
    #[test]
    fn append_path_test_with_and_base_path_and_trailing_slash_and_params() {
        assert_eq!(
            make_uri(
                Some(&Url::parse("https://localhost:8080/v1/?foo=bar").unwrap()),
                &"/hello/{$this.id}?id={$this.id}".parse().unwrap(),
                &this! {"id": 42 },
            )
            .unwrap()
            .as_str(),
            "https://localhost:8080/v1/hello/42?foo=bar&id=42"
        );
    }

    #[test]
    fn path_cases() {
        let template = "http://localhost/users/{$this.user_id}?a={$this.b}&e={$this.f.g}"
            .parse()
            .unwrap();

        assert_snapshot!(
            make_uri(None, &template, &Default::default())
                .unwrap()
                .as_str(),
            @"http://localhost/users/?a=&e="
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "user_id": 123,
                    "b": "456",
                    "f": {"g": "abc"}
                }
            )
            .unwrap()
            .to_string(),
            @"http://localhost/users/123?a=456&e=abc"
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "user_id": 123,
                    "f": "not an object"
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/users/123?a=&e="
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    // The order of the variables should not matter.
                    "b": "456",
                    "user_id": "123"
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/users/123?a=456&e="
        );

        assert_eq!(
            make_uri(
                None,
                &template,
                &this! {
                    "user_id": "123",
                    "b": "a",
                    "f": {"g": "e"},
                    // Extra variables should be ignored.
                    "extra": "ignored"
                }
            )
            .unwrap()
            .to_string(),
            "http://localhost/users/123?a=a&e=e",
        );
    }

    #[test]
    fn multi_variable_parameter_values() {
        let template =
            "http://localhost/locations/xyz({$this.x},{$this.y},{$this.z})?required={$this.b},{$this.c};{$this.d}&optional=[{$this.e},{$this.f}]"
                .parse()
                .unwrap();

        assert_eq!(
            make_uri(
                None,
                &template,
                &this! {
                    "x": 1,
                    "y": 2,
                    "z": 3,
                    "b": 4,
                    "c": 5,
                    "d": 6,
                    "e": 7,
                    "f": 8,
                }
            )
            .unwrap()
            .as_str(),
            "http://localhost/locations/xyz(1,2,3)?required=4%2C5%3B6&optional=%5B7%2C8%5D"
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "x": 1,
                    "y": 2,
                    "z": 3,
                    "b": 4,
                    "c": 5,
                    "d": 6,
                    "e": 7
                    // "f": 8,
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/locations/xyz(1,2,3)?required=4%2C5%3B6&optional=%5B7%2C%5D",
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "x": 1,
                    "y": 2,
                    "z": 3,
                    "b": 4,
                    "c": 5,
                    "d": 6,
                    // "e": 7,
                    "f": 8
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/locations/xyz(1,2,3)?required=4%2C5%3B6&optional=%5B%2C8%5D",
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "x": 1,
                    "y": 2,
                    "z": 3,
                    "b": 4,
                    "c": 5,
                    "d": 6
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/locations/xyz(1,2,3)?required=4%2C5%3B6&optional=%5B%2C%5D",
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    // "x": 1,
                    "y": 2,
                    "z": 3
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/locations/xyz(,2,3)?required=%2C%3B&optional=%5B%2C%5D",
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "x": 1,
                    "y": 2
                    // "z": 3,
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/locations/xyz(1,2,)?required=%2C%3B&optional=%5B%2C%5D"
        );

        assert_snapshot!(
            make_uri(
                None,
                &template,
                &this! {
                    "b": 4,
                    // "c": 5,
                    "d": 6,
                    "x": 1,
                    "y": 2,
                    "z": 3
                }
            )
            .unwrap()
            .to_string(),
            @"http://localhost/locations/xyz(1,2,3)?required=4%2C%3B6&optional=%5B%2C%5D"
        );

        let line_template = "http://localhost/line/{$this.p1.x},{$this.p1.y},{$this.p1.z}/{$this.p2.x},{$this.p2.y},{$this.p2.z}"
            .parse()
            .unwrap();

        assert_snapshot!(
            make_uri(
                None,
                &line_template,
                &this! {
                    "p1": {
                        "x": 1,
                        "y": 2,
                        "z": 3,
                    },
                    "p2": {
                        "x": 4,
                        "y": 5,
                        "z": 6,
                    }
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/line/1,2,3/4,5,6"
        );

        assert_snapshot!(
            make_uri(
                None,
                &line_template,
                &this! {
                    "p1": {
                        "x": 1,
                        "y": 2,
                        "z": 3,
                    },
                    "p2": {
                        "x": 4,
                        "y": 5,
                        // "z": 6,
                    }
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/line/1,2,3/4,5,"
        );

        assert_snapshot!(
            make_uri(
                None,
                &line_template,
                &this! {
                    "p1": {
                        "x": 1,
                        // "y": 2,
                        "z": 3,
                    },
                    "p2": {
                        "x": 4,
                        "y": 5,
                        "z": 6,
                    }
                }
            )
            .unwrap()
            .as_str(),
            @"http://localhost/line/1,,3/4,5,6"
        );
    }

    /// Values are all strings, they can't have semantic value for HTTP. That means no dynamic paths,
    /// no nested query params, etc. When we expand values, we have to make sure they're safe.
    #[test]
    fn parameter_encoding() {
        let vars = &this! {
            "path": "/some/path",
            "question_mark": "a?b",
            "ampersand": "a&b=b",
            "hash": "a#b",
        };

        let template = "http://localhost/{$this.path}/{$this.question_mark}?a={$this.ampersand}&c={$this.hash}"
            .parse()
            .expect("Failed to parse URL template");
        let url = make_uri(None, &template, vars).expect("Failed to generate URL");

        assert_eq!(
            url.as_str(),
            "http://localhost/%2Fsome%2Fpath/a%3Fb?a=a%26b%3Db&c=a%23b"
        );
    }
}

#[cfg(test)]
mod tests {
    use std::str::FromStr;

    use apollo_compiler::ExecutableDocument;
    use apollo_compiler::Schema;
    use apollo_federation::sources::connect::HTTPMethod;
    use apollo_federation::sources::connect::HeaderSource;
    use apollo_federation::sources::connect::JSONSelection;
    use http::header::CONTENT_ENCODING;
    use http::HeaderMap;
    use http::HeaderValue;
    use insta::assert_debug_snapshot;

    use super::*;
    use crate::services::router::body;
    use crate::Context;

    #[test]
    fn test_headers_to_add_no_directives() {
        let incoming_supergraph_headers: HeaderMap<HeaderValue> = vec![
            ("x-rename".parse().unwrap(), "renamed".parse().unwrap()),
            ("x-rename".parse().unwrap(), "also-renamed".parse().unwrap()),
            ("x-ignore".parse().unwrap(), "ignored".parse().unwrap()),
            (CONTENT_ENCODING, "gzip".parse().unwrap()),
        ]
        .into_iter()
        .collect();

        let request = http::Request::builder();
        let (request, _) = add_headers(
            request,
            &incoming_supergraph_headers,
            &IndexMap::with_hasher(Default::default()),
            &IndexMap::with_hasher(Default::default()),
        );
        let request = request.body(body::empty()).unwrap();
        // With no header config, nothing is propagated from the incoming request.
        assert!(request.headers().is_empty());
    }

    #[test]
    fn test_headers_to_add_with_config() {
        let incoming_supergraph_headers: HeaderMap<HeaderValue> = vec![
            ("x-rename".parse().unwrap(), "renamed".parse().unwrap()),
            ("x-rename".parse().unwrap(), "also-renamed".parse().unwrap()),
            ("x-ignore".parse().unwrap(), "ignored".parse().unwrap()),
            (CONTENT_ENCODING, "gzip".parse().unwrap()),
        ]
        .into_iter()
        .collect();

        #[allow(clippy::mutable_key_type)]
        let mut config = IndexMap::with_hasher(Default::default());
        config.insert(
            "x-new-name".parse().unwrap(),
            HeaderSource::From("x-rename".parse().unwrap()),
        );
        config.insert(
            "x-insert".parse().unwrap(),
            HeaderSource::Value("inserted".parse().unwrap()),
        );

        let request = http::Request::builder();
        let (request, _) = add_headers(
            request,
            &incoming_supergraph_headers,
            &config,
            &IndexMap::with_hasher(Default::default()),
        );
        let request = request.body(body::empty()).unwrap();
        let result = request.headers();
        // Both values of the propagated header plus the inserted one.
        assert_eq!(result.len(), 3);
        assert_eq!(result.get("x-new-name"), Some(&"renamed".parse().unwrap()));
        assert_eq!(result.get("x-insert"), Some(&"inserted".parse().unwrap()));
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn make_request() {
        let schema = Schema::parse_and_validate("type Query { f(a: Int): String }", "").unwrap();
        let doc = ExecutableDocument::parse_and_validate(&schema, "{f(a: 42)}", "").unwrap();
        let mut vars = IndexMap::default();
        vars.insert("$args".to_string(), json!({ "a": 42 }));

        let req = super::make_request(
            &HttpJsonTransport {
                source_url: None,
                connect_template: URLTemplate::from_str("http://localhost:8080/").unwrap(),
                method: HTTPMethod::Post,
                headers: Default::default(),
                body: Some(JSONSelection::parse("$args { a }").unwrap()),
            },
            vars,
            &connect::Request {
                service_name: Arc::from("service"),
                context: Context::default(),
                operation: Arc::from(doc),
                supergraph_request: Arc::from(http::Request::default()),
                variables: Default::default(),
            },
            &None,
        )
        .unwrap();

        assert_debug_snapshot!(req, @r###"
        (
            Request {
                method: POST,
                uri: http://localhost:8080/,
                version: HTTP/1.1,
                headers: {
                    "content-type": "application/json",
                    "content-length": "8",
                },
                body: UnsyncBoxBody,
            },
            None,
        )
        "###);

        let body = body::into_string(req.0.into_body()).await.unwrap();
        insta::assert_snapshot!(body, @r#"{"a":42}"#);
    }

    #[tokio::test(flavor = "multi_thread")]
    async fn make_request_form_encoded() {
        let schema = Schema::parse_and_validate("type Query { f(a: Int): String }", "").unwrap();
        let doc = ExecutableDocument::parse_and_validate(&schema, "{f(a: 42)}", "").unwrap();
        let mut vars = IndexMap::default();
        vars.insert("$args".to_string(), json!({ "a": 42 }));
        let mut headers = IndexMap::default();
        headers.insert(
            "content-type".parse().unwrap(),
            HeaderSource::Value("application/x-www-form-urlencoded".parse().unwrap()),
        );

        let req = super::make_request(
            &HttpJsonTransport {
                source_url: None,
                connect_template: URLTemplate::from_str("http://localhost:8080/").unwrap(),
                method: HTTPMethod::Post,
                headers,
                body: Some(JSONSelection::parse("$args { a }").unwrap()),
            },
            vars,
            &connect::Request {
                service_name: Arc::from("service"),
                context: Context::default(),
                operation: Arc::from(doc),
                supergraph_request: Arc::from(http::Request::default()),
                variables: Default::default(),
            },
            &None,
        )
        .unwrap();

        assert_debug_snapshot!(req, @r###"
        (
            Request {
                method: POST,
                uri: http://localhost:8080/,
                version: HTTP/1.1,
                headers: {
                    "content-type": "application/x-www-form-urlencoded",
                    "content-length": "4",
                },
                body: UnsyncBoxBody,
            },
            None,
        )
        "###);

        let body = body::into_string(req.0.into_body()).await.unwrap();
        insta::assert_snapshot!(body, @r#"a=42"#);
    }
}
diff --git a/apollo-router/src/plugins/connectors/make_requests.rs b/apollo-router/src/plugins/connectors/make_requests.rs
new file mode 100644 index 0000000000..7bbc53b4a2 --- /dev/null +++ b/apollo-router/src/plugins/connectors/make_requests.rs
use std::sync::Arc;

use apollo_compiler::collections::HashSet;
use apollo_compiler::collections::IndexMap;
use apollo_compiler::executable::Selection;
use apollo_compiler::Name;
use apollo_federation::sources::connect::Connector;
use apollo_federation::sources::connect::CustomConfiguration;
use apollo_federation::sources::connect::EntityResolver;
use apollo_federation::sources::connect::JSONSelection;
use apollo_federation::sources::connect::Namespace;
use parking_lot::Mutex;
use serde_json_bytes::json;
use serde_json_bytes::ByteString;
use serde_json_bytes::Map;
use serde_json_bytes::Value;

use super::http::Request;
use super::http_json_transport::make_request;
use super::http_json_transport::HttpJsonTransportError;
use crate::json_ext::Path;
use crate::json_ext::PathElement;
use crate::plugins::connectors::plugin::debug::ConnectorContext;
use crate::services::connect;
use crate::Context;

const REPRESENTATIONS_VAR: &str = "representations";
const ENTITIES: &str = "_entities";
const TYPENAME: &str = "__typename";

/// The inputs available to a single connector request: the field's arguments
/// (`$args`) and the sibling fields of the entity being resolved (`$this`).
#[derive(Clone, Debug, Default)]
pub(crate) struct RequestInputs {
    args: Map<ByteString, Value>,
    this: Map<ByteString, Value>,
}

impl RequestInputs {
    /// Creates a map for use in JSONSelection::apply_with_vars. It only clones
    /// values into the map if the variable namespaces (`$args`, `$this`, etc.)
    /// are actually referenced in the expressions for URLs, headers, body, or selection.
    pub(crate) fn merge(
        &self,
        variables_used: &HashSet<Namespace>,
        config: Option<&CustomConfiguration>,
        context: &Context,
        status: Option<u16>,
    ) -> IndexMap<String, Value> {
        let mut map = IndexMap::with_capacity_and_hasher(variables_used.len(), Default::default());

        // Not all connectors reference $args
        if variables_used.contains(&Namespace::Args) {
            map.insert(
                Namespace::Args.as_str().into(),
                Value::Object(self.args.clone()),
            );
        }

        // $this only applies to fields on entity types (not Query or Mutation)
        if variables_used.contains(&Namespace::This) {
            map.insert(
                Namespace::This.as_str().into(),
                Value::Object(self.this.clone()),
            );
        }

        // $context could be a large object, so we only convert it to JSON
        // if it's used. It can also be mutated between requests, so we have
        // to convert it each time.
        if variables_used.contains(&Namespace::Context) {
            let context: Map<ByteString, Value> = context
                .iter()
                .map(|r| (r.key().as_str().into(), r.value().clone()))
                .collect();
            map.insert(Namespace::Context.as_str().into(), Value::Object(context));
        }

        // $config doesn't change unless the schema reloads, but we can avoid
        // the allocation if it's unused.
        if variables_used.contains(&Namespace::Config) {
            if let Some(config) = config {
                map.insert(Namespace::Config.as_str().into(), json!(config));
            }
        }

        // $status is available only for response mapping
        if variables_used.contains(&Namespace::Status) {
            if let Some(status) = status {
                map.insert(
                    Namespace::Status.as_str().into(),
                    Value::Number(status.into()),
                );
            }
        }

        map
    }
}

/// Where a connector result belongs in the GraphQL response: a root field,
/// an entry in `_entities`, or a single field of an `_entities` entry.
#[derive(Clone, Debug)]
pub(crate) enum ResponseKey {
    RootField {
        name: String,
        selection: Arc<JSONSelection>,
        inputs: RequestInputs,
    },
    Entity {
        index: usize,
        selection: Arc<JSONSelection>,
        inputs: RequestInputs,
    },
    EntityField {
        index: usize,
        field_name: String,
        /// Is Some only if the output type is a concrete object type. If it's
        /// an interface, it's treated as an interface object and we can't emit
        /// a __typename in the response.
        typename: Option<Name>,
        selection: Arc<JSONSelection>,
        inputs: RequestInputs,
    },
}

impl ResponseKey {
    /// The JSONSelection used to map the HTTP response body for this key.
    pub(crate) fn selection(&self) -> &JSONSelection {
        match self {
            ResponseKey::RootField { selection, .. } => selection,
            ResponseKey::Entity { selection, .. } => selection,
            ResponseKey::EntityField { selection, .. } => selection,
        }
    }

    /// The request inputs (`$args`/`$this`) captured for this key.
    pub(crate) fn inputs(&self) -> &RequestInputs {
        match self {
            ResponseKey::RootField { inputs, .. } => inputs,
            ResponseKey::Entity { inputs, .. } => inputs,
            ResponseKey::EntityField { inputs, .. } => inputs,
        }
    }
}

/// Convert a ResponseKey into a Path for use in GraphQL errors. This mimics
/// the behavior of a GraphQL subgraph, including the `_entities` field. When
/// the path gets to [`FetchNode::response_at_path`], it will be amended and
/// appended to a parent path to create the full path to the field. For ex:
///
/// - parent path: `["posts", @, "user"]`
/// - path from key: `["_entities", 0, "user", "profile"]`
/// - result: `["posts", 1, "user", "profile"]`
impl From<&ResponseKey> for Path {
    fn from(key: &ResponseKey) -> Self {
        match key {
            ResponseKey::RootField { name, .. } => {
                Path::from_iter(vec![PathElement::Key(name.to_string(), None)])
            }
            ResponseKey::Entity { index, .. } => Path::from_iter(vec![
                PathElement::Key("_entities".to_string(), None),
                PathElement::Index(*index),
            ]),
            ResponseKey::EntityField {
                index, field_name, ..
            } => Path::from_iter(vec![
                PathElement::Key("_entities".to_string(), None),
                PathElement::Index(*index),
                PathElement::Key(field_name.clone(), None),
            ]),
        }
    }
}

/// Entry point: turns one connector fetch into the list of HTTP requests to
/// execute, dispatching on how the connector resolves entities.
pub(crate) fn make_requests(
    request: connect::Request,
    connector: &Connector,
    debug: &Option<Arc<Mutex<ConnectorContext>>>,
) -> Result<Vec<Request>, MakeRequestError> {
    let request_params = match connector.entity_resolver {
        Some(EntityResolver::Explicit) => entities_from_request(connector, &request),
        Some(EntityResolver::Implicit) => entities_with_fields_from_request(connector, &request),
        None => root_fields(connector, &request),
    }?;

    request_params_to_requests(connector, request_params, &request, debug)
}

/// Builds one HTTP request per response key, merging each key's inputs with
/// the connector's referenced variable namespaces.
fn request_params_to_requests(
    connector: &Connector,
    request_params: Vec<ResponseKey>,
    original_request: &connect::Request,
    debug: &Option<Arc<Mutex<ConnectorContext>>>,
) -> Result<Vec<Request>, MakeRequestError> {
    let mut results = vec![];
    for response_key in request_params {
        let (request, debug_request) = make_request(
            &connector.transport,
            response_key.inputs().merge(
                &connector.request_variables,
                connector.config.as_ref(),
                &original_request.context,
                None,
            ),
            original_request,
            debug,
        )?;

        results.push(Request {
            request,
            key: response_key,
            debug_request,
        });
    }

    Ok(results)
}

// --- ERRORS ------------------------------------------------------------------

#[derive(Debug, thiserror::Error, displaydoc::Display)]
pub(crate) enum MakeRequestError {
    /// Invalid request operation: {0}
    InvalidOperation(String),

    /// Unsupported request operation: {0}
    UnsupportedOperation(String),

    /// Invalid request arguments: {0}
    InvalidArguments(String),

    /// Invalid entity representation: {0}
    InvalidRepresentations(String),

    /// Cannot create HTTP request: {0}
    TransportError(#[from] HttpJsonTransportError),
}

// --- ROOT FIELDS -------------------------------------------------------------

/// Given a query, find the root fields and return a list of requests.
/// The connector subgraph must have only a single root field, but it could be
/// used multiple times with aliases.
///
/// Root fields exist in the supergraph schema so we can parse the operation
/// using the schema. (This isn't true for _entities operations.)
///
/// Example:
/// ```graphql
/// type Query {
///   foo(bar: String): Foo @connect(...)
/// }
/// ```
/// ```graphql
/// {
///   a: foo(bar: "a") # one request
///   b: foo(bar: "b") # another request
/// }
/// ```
fn root_fields(
    connector: &Connector,
    request: &connect::Request,
) -> Result<Vec<ResponseKey>, MakeRequestError> {
    use MakeRequestError::*;

    let op = request
        .operation
        .operations
        .get(None)
        .map_err(|_| InvalidOperation("no operation document".into()))?;

    op.selection_set
        .selections
        .iter()
        .map(|s| match s {
            Selection::Field(field) => {
                // Aliases produce distinct response keys (and distinct requests).
                let response_name = field
                    .alias
                    .as_ref()
                    .unwrap_or_else(|| &field.name)
                    .to_string();

                let args = graphql_utils::field_arguments_map(field, &request.variables.variables)
                    .map_err(|_| {
                        InvalidArguments("cannot get inputs from field arguments".into())
                    })?;

                let request_inputs = RequestInputs {
                    args,
                    this: Default::default(),
                };

                let response_key = ResponseKey::RootField {
                    name: response_name,
                    selection: Arc::new(
                        connector
                            .selection
                            .apply_selection_set(&request.operation, &field.selection_set),
                    ),
                    inputs: request_inputs,
                };

                Ok(response_key)
            }

            // The query planner removes fragments at the root so we don't have
            // to worry about these branches
            Selection::FragmentSpread(_) | Selection::InlineFragment(_) => {
                Err(UnsupportedOperation(
                    "top-level fragments in query planner nodes should not happen".into(),
                ))
            }
        })
        .collect::<Result<Vec<_>, MakeRequestError>>()
}

// --- ENTITIES ----------------------------------------------------------------

/// Connectors marked with `entity: true` can be used as entity resolvers,
/// (resolving `_entities` queries) or regular root fields. For now we'll check
/// the existence of the `representations` variable to determine which use case
/// is relevant here.
///
/// If it's an entity resolver, we create separate requests for each item in the
/// representations array.
///
/// ```json
/// {
///   "variables": {
///     "representations": [{ "__typename": "User", "id": "1" }]
///   }
/// }
/// ```
///
/// Returns a list of request inputs and the response key (index in the array).
fn entities_from_request(
    connector: &Connector,
    request: &connect::Request,
) -> Result<Vec<ResponseKey>, MakeRequestError> {
    use MakeRequestError::*;

    // No representations variable means this is being used as a plain root field.
    let Some(representations) = request.variables.variables.get(REPRESENTATIONS_VAR) else {
        return root_fields(connector, request);
    };

    let op = request
        .operation
        .operations
        .get(None)
        .map_err(|_| InvalidOperation("no operation document".into()))?;

    let (entities_field, _) = graphql_utils::get_entity_fields(&request.operation, op)?;

    let selection = Arc::new(
        connector
            .selection
            .apply_selection_set(&request.operation, &entities_field.selection_set),
    );

    representations
        .as_array()
        .ok_or_else(|| InvalidRepresentations("representations is not an array".into()))?
        .iter()
        .enumerate()
        .map(|(i, rep)| {
            let request_inputs = RequestInputs {
                args: rep
                    .as_object()
                    .ok_or_else(|| {
                        InvalidRepresentations("representation is not an object".into())
                    })?
                    .clone(),
                // entity connectors are always on Query fields, so they cannot use
                // sibling fields with $this
                this: Default::default(),
            };

            Ok(ResponseKey::Entity {
                index: i,
                selection: selection.clone(),
                inputs: request_inputs,
            })
        })
        .collect::<Result<Vec<_>, _>>()
}

// --- ENTITY FIELDS -----------------------------------------------------------

/// This is effectively the combination of the other two functions:
///
/// * It makes a request for each item in the `representations` array.
/// * If the connector field is aliased, it makes a request for each alias.
///
/// So it can return N (representations) x M (aliases) requests.
///
/// ```json
/// {
///   "query": "{ _entities(representations: $representations) { ... on User { name } } }",
///   "variables": { "representations": [{ "__typename": "User", "id": "1" }] }
/// }
/// ```
///
/// Return a list of request inputs with the response key (index in list and
/// name/alias of field) for each.
fn entities_with_fields_from_request(
    connector: &Connector,
    request: &connect::Request,
) -> Result<Vec<ResponseKey>, MakeRequestError> {
    use MakeRequestError::*;

    let op = request
        .operation
        .operations
        .get(None)
        .map_err(|_| InvalidOperation("no operation document".into()))?;

    let (entities_field, typename_requested) =
        graphql_utils::get_entity_fields(&request.operation, op)?;

    // Collect (type condition, field) pairs from the fragments under `_entities`.
    let types_and_fields = entities_field
        .selection_set
        .selections
        .iter()
        .map(|selection| match selection {
            Selection::Field(_) => Ok::<_, MakeRequestError>(vec![]),

            Selection::FragmentSpread(f) => {
                let Some(frag) = f.fragment_def(&request.operation) else {
                    return Err(InvalidOperation(format!(
                        "invalid operation: fragment `{}` missing",
                        f.fragment_name
                    )));
                };
                let typename = frag.type_condition();
                Ok(frag
                    .selection_set
                    .selections
                    .iter()
                    .filter_map(|sel| {
                        let field = match sel {
                            Selection::Field(f) => {
                                if f.name == TYPENAME {
                                    None
                                } else {
                                    Some(f)
                                }
                            }
                            Selection::FragmentSpread(_) | Selection::InlineFragment(_) => {
                                return Some(Err(InvalidOperation(
                                    "handling fragments inside entity selections not implemented"
                                        .into(),
                                )))
                            }
                        };
                        field.map(|f| Ok((typename, f)))
                    })
                    .collect::<Result<Vec<_>, _>>()?)
            }

            Selection::InlineFragment(frag) => {
                let typename = frag
                    .type_condition
                    .as_ref()
                    .ok_or_else(|| InvalidOperation("missing type condition".into()))?;
                Ok(frag
                    .selection_set
                    .selections
                    .iter()
                    .filter_map(|sel| {
                        let field = match sel {
                            Selection::Field(f) => {
                                if f.name == TYPENAME {
                                    None
                                } else {
                                    Some(f)
                                }
                            }
                            Selection::FragmentSpread(_) | Selection::InlineFragment(_) => {
                                return Some(Err(InvalidOperation(
                                    "handling fragments inside entity selections not implemented"
                                        .into(),
                                )));
                            }
                        };
                        field.map(|f| Ok((typename, f)))
                    })
                    .collect::<Result<Vec<_>, _>>()?)
            }
        })
        .collect::<Result<Vec<_>, _>>()?;

    let representations = request
        .variables
        .variables
        .get(REPRESENTATIONS_VAR)
        .ok_or_else(|| InvalidRepresentations("missing representations variable".into()))?
        .as_array()
        .ok_or_else(|| InvalidRepresentations("representations is not an array".into()))?
        .iter()
        .enumerate()
        .collect::<Vec<_>>();

    // if we have multiple fields (because of aliases), we'll flatten that list
    // and generate requests for each field/representation pair
    types_and_fields
        .into_iter()
        .flatten()
        .flat_map(|(typename, field)| {
            let selection = Arc::new(
                connector
                    .selection
                    .apply_selection_set(&request.operation, &field.selection_set),
            );

            representations.iter().map(move |(i, representation)| {
                let args = graphql_utils::field_arguments_map(field, &request.variables.variables)
                    .map_err(|_| {
                        InvalidArguments("cannot build inputs from field arguments".into())
                    })?;

                let response_name = field
                    .alias
                    .as_ref()
                    .unwrap_or_else(|| &field.name)
                    .to_string();

                let request_inputs = RequestInputs {
                    args,
                    this: representation
                        .as_object()
                        .ok_or_else(|| {
                            InvalidRepresentations("representation is not an object".into())
                        })?
                        .clone(),
                };
                Ok::<_, MakeRequestError>(ResponseKey::EntityField {
                    index: *i,
                    field_name: response_name.to_string(),
                    // if the fetch node operation doesn't include __typename, then
                    // we're assuming this is for an interface object and we don't want
                    // to include a __typename in the response.
                    //
                    // TODO: is this fragile? should we just check the output
                    // type of the field and omit the typename if it's abstract?
                    typename: typename_requested.then_some(typename.clone()),
                    selection: selection.clone(),
                    inputs: request_inputs,
                })
            })
        })
        .collect::<Result<Vec<_>, _>>()
}

#[cfg(test)]
mod tests {
    use std::sync::Arc;

    use apollo_compiler::name;
    use apollo_compiler::ExecutableDocument;
    use apollo_compiler::Schema;
    use apollo_federation::sources::connect::ConnectId;
    use apollo_federation::sources::connect::ConnectSpec;
    use apollo_federation::sources::connect::Connector;
    use apollo_federation::sources::connect::HTTPMethod;
    use apollo_federation::sources::connect::HttpJsonTransport;
    use apollo_federation::sources::connect::JSONSelection;
    use insta::assert_debug_snapshot;
    use url::Url;

    use crate::graphql;
    use crate::query_planner::fetch::Variables;
    use crate::Context;

    #[test]
    fn test_root_fields_simple() {
        let schema = Arc::new(
            Schema::parse_and_validate("type Query { a: A } type A { f: String }", "./").unwrap(),
        );

        let req = crate::services::connect::Request::builder()
            .service_name("subgraph_Query_a_0".into())
            .context(Context::default())
            .operation(Arc::new(
                ExecutableDocument::parse_and_validate(
                    &schema,
                    "query { a { f } a2: a { f2: f } }".to_string(),
                    "./",
                )
                .unwrap(),
            ))
            .variables(Variables {
                variables: Default::default(),
                inverted_paths: Default::default(),
                contextual_arguments: Default::default(),
            })
            .supergraph_request(Arc::new(
                http::Request::builder()
                    .body(graphql::Request::builder().build())
                    .unwrap(),
            ))
            .build();

        let connector = Connector {
spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(a), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("f").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::root_fields(&connector, &req), @r###" + Ok( + [ + RootField { + name: "a", + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "f", + ), + range: Some( + 0..1, + ), + }, + None, + ), + ], + range: Some( + 0..1, + ), + }, + ), + inputs: RequestInputs { + args: {}, + this: {}, + }, + }, + RootField { + name: "a2", + selection: Named( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "f2", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "f", + ), + range: Some( + 0..1, + ), + }, + None, + ), + ], + range: Some( + 0..1, + ), + }, + ), + inputs: RequestInputs { + args: {}, + this: {}, + }, + }, + ], + ) + "###); + } + + #[test] + fn test_root_fields_inputs() { + let schema = Arc::new( + Schema::parse_and_validate("type Query {b(var: String): String}", "./").unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Query_b_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &schema, + "query($var: String) { b(var: \"inline\") b2: b(var: $var) }".to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ "var": "variable" }) + .as_object() + .unwrap() + .clone(), + inverted_paths: 
Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(b), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::root_fields(&connector, &req), @r###" + Ok( + [ + RootField { + name: "b", + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Empty, + range: Some( + 1..1, + ), + }, + ), + range: Some( + 0..1, + ), + }, + }, + ), + inputs: RequestInputs { + args: { + "var": String( + "inline", + ), + }, + this: {}, + }, + }, + RootField { + name: "b2", + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Empty, + range: Some( + 1..1, + ), + }, + ), + range: Some( + 0..1, + ), + }, + }, + ), + inputs: RequestInputs { + args: { + "var": String( + "variable", + ), + }, + this: {}, + }, + }, + ], + ) + "###); + } + + #[test] + fn test_root_fields_input_types() { + let schema = Arc::new(Schema::parse_and_validate( + r#" + scalar JSON + type Query { + c(var1: Int, var2: Boolean, var3: Float, var4: ID, var5: JSON, var6: [String], var7: String): String + } + "#, + "./", + ).unwrap()); + + let req = crate::services::connect::Request::builder() + 
.service_name("subgraph_Query_c_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &schema, + r#" + query( + $var1: Int, $var2: Boolean, $var3: Float, $var4: ID, $var5: JSON, $var6: [String], $var7: String + ) { + c(var1: $var1, var2: $var2, var3: $var3, var4: $var4, var5: $var5, var6: $var6, var7: $var7) + c2: c( + var1: 1, + var2: true, + var3: 0.9, + var4: "123", + var5: { a: 42 }, + var6: ["item"], + var7: null + ) + } + "#.to_string(), + "./", + ) + .unwrap(), + ) + ) + .variables(Variables { + variables: serde_json_bytes::json!({ + "var1": 1, "var2": true, "var3": 0.9, + "var4": "123", "var5": { "a": 42 }, "var6": ["item"], + "var7": null + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(c), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::root_fields(&connector, &req), @r###" + Ok( + [ + RootField { + name: "c", + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "data", + ), + range: Some( + 2..6, + ), + }, + WithRange { + node: Empty, + range: Some( + 6..6, + ), + 
}, + ), + range: Some( + 1..6, + ), + }, + ), + range: Some( + 0..6, + ), + }, + }, + ), + inputs: RequestInputs { + args: { + "var1": Number(1), + "var2": Bool( + true, + ), + "var3": Number(0.9), + "var4": String( + "123", + ), + "var5": Object({ + "a": Number(42), + }), + "var6": Array([ + String( + "item", + ), + ]), + "var7": Null, + }, + this: {}, + }, + }, + RootField { + name: "c2", + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "data", + ), + range: Some( + 2..6, + ), + }, + WithRange { + node: Empty, + range: Some( + 6..6, + ), + }, + ), + range: Some( + 1..6, + ), + }, + ), + range: Some( + 0..6, + ), + }, + }, + ), + inputs: RequestInputs { + args: { + "var1": Number(1), + "var2": Bool( + true, + ), + "var3": Number(0.9), + "var4": String( + "123", + ), + "var5": Object({ + "a": Number(42), + }), + "var6": Array([ + String( + "item", + ), + ]), + "var7": Null, + }, + this: {}, + }, + }, + ], + ) + "###); + } + + #[test] + fn entities_from_request_entity() { + let partial_sdl = r#" + type Query { + entity(id: ID!): Entity + } + + type Entity { + field: String + } + "#; + + let subgraph_schema = Arc::new( + Schema::parse_and_validate( + format!( + r#"{partial_sdl} + extend type Query {{ + _entities(representations: [_Any!]!): _Entity + }} + scalar _Any + union _Entity = Entity + "# + ), + "./", + ) + .unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Query_entity_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &subgraph_schema, + r#" + query($representations: [_Any!]!) { + _entities(representations: $representations) { + __typename + ... 
on Entity { + field + alias: field + } + } + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "representations": [ + { "__typename": "Entity", "id": "1" }, + { "__typename": "Entity", "id": "2" }, + ] + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(entity), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("field").unwrap(), + entity_resolver: Some(super::EntityResolver::Explicit), + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::entities_from_request(&connector, &req).unwrap(), @r###" + [ + Entity { + index: 0, + selection: Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: None, + }, + range: None, + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: None, + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: None, + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "_Entity", + ), + range: None, + }, + ], + range: None, + }, + ), + WithRange { + node: Empty, + range: None, + }, + ), + range: None, + }, + ), + range: None, + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "field", + 
), + range: Some( + 0..5, + ), + }, + None, + ), + Field( + Some( + Alias { + name: WithRange { + node: Field( + "alias", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + ], + range: Some( + 0..5, + ), + }, + ), + inputs: RequestInputs { + args: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + this: {}, + }, + }, + Entity { + index: 1, + selection: Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: None, + }, + range: None, + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: None, + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: None, + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "_Entity", + ), + range: None, + }, + ], + range: None, + }, + ), + WithRange { + node: Empty, + range: None, + }, + ), + range: None, + }, + ), + range: None, + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + Field( + Some( + Alias { + name: WithRange { + node: Field( + "alias", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + ], + range: Some( + 0..5, + ), + }, + ), + inputs: RequestInputs { + args: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + this: {}, + }, + }, + ] + "###); + } + + #[test] + fn entities_from_request_entity_with_fragment() { + let partial_sdl = r#" + type Query { + entity(id: ID!): Entity + } + + type Entity { + field: String + } + "#; + + let subgraph_schema = Arc::new( + Schema::parse_and_validate( + format!( + r#"{partial_sdl} + extend type Query {{ + _entities(representations: [_Any!]!): _Entity + }} + scalar _Any + union _Entity = Entity + "# + ), 
+ "./", + ) + .unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Query_entity_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &subgraph_schema, + r#" + query($representations: [_Any!]!) { + _entities(representations: $representations) { + ... _generated_Entity + } + } + fragment _generated_Entity on Entity { + __typename + field + alias: field + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "representations": [ + { "__typename": "Entity", "id": "1" }, + { "__typename": "Entity", "id": "2" }, + ] + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(entity), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("field").unwrap(), + entity_resolver: Some(super::EntityResolver::Explicit), + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::entities_from_request(&connector, &req).unwrap(), @r###" + [ + Entity { + index: 0, + selection: Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: None, + }, + range: None, + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + 
WithRange { + node: $, + range: None, + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: None, + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "_Entity", + ), + range: None, + }, + ], + range: None, + }, + ), + WithRange { + node: Empty, + range: None, + }, + ), + range: None, + }, + ), + range: None, + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + Field( + Some( + Alias { + name: WithRange { + node: Field( + "alias", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + ], + range: Some( + 0..5, + ), + }, + ), + inputs: RequestInputs { + args: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + this: {}, + }, + }, + Entity { + index: 1, + selection: Named( + SubSelection { + selections: [ + Path { + alias: Some( + Alias { + name: WithRange { + node: Field( + "__typename", + ), + range: None, + }, + range: None, + }, + ), + inline: false, + path: PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: None, + }, + WithRange { + node: Method( + WithRange { + node: "echo", + range: None, + }, + Some( + MethodArgs { + args: [ + WithRange { + node: String( + "_Entity", + ), + range: None, + }, + ], + range: None, + }, + ), + WithRange { + node: Empty, + range: None, + }, + ), + range: None, + }, + ), + range: None, + }, + }, + }, + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + Field( + Some( + Alias { + name: WithRange { + node: Field( + "alias", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + None, + ), + ], + range: Some( + 0..5, + ), + }, + ), + inputs: RequestInputs { + args: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + this: {}, + 
}, + }, + ] + "###); + } + + #[test] + fn entities_from_request_root_field() { + let partial_sdl = r#" + type Query { + entity(id: ID!): Entity + } + + type Entity { + field: T + } + + type T { + field: String + } + "#; + let schema = Arc::new(Schema::parse_and_validate(partial_sdl, "./").unwrap()); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Query_entity_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &schema, + r#" + query($a: ID!, $b: ID!) { + a: entity(id: $a) { field { field } } + b: entity(id: $b) { field { alias: field } } + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "a": "1", + "b": "2" + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(entity), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("field { field }").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::entities_from_request(&connector, &req).unwrap(), @r###" + [ + RootField { + name: "a", + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + Some( + SubSelection { + selections: [ + 
Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 8..13, + ), + }, + None, + ), + ], + range: Some( + 6..15, + ), + }, + ), + ), + ], + range: Some( + 0..15, + ), + }, + ), + inputs: RequestInputs { + args: { + "id": String( + "1", + ), + }, + this: {}, + }, + }, + RootField { + name: "b", + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "field", + ), + range: Some( + 0..5, + ), + }, + Some( + SubSelection { + selections: [ + Field( + Some( + Alias { + name: WithRange { + node: Field( + "alias", + ), + range: None, + }, + range: None, + }, + ), + WithRange { + node: Field( + "field", + ), + range: Some( + 8..13, + ), + }, + None, + ), + ], + range: Some( + 6..15, + ), + }, + ), + ), + ], + range: Some( + 0..15, + ), + }, + ), + inputs: RequestInputs { + args: { + "id": String( + "2", + ), + }, + this: {}, + }, + }, + ] + "###); + } + + #[test] + fn entities_with_fields_from_request() { + let partial_sdl = r#" + type Query { _: String } # just to make it valid + + type Entity { # @key(fields: "id") + id: ID! + field(foo: String): T + } + + type T { + selected: String + } + "#; + + let subgraph_schema = Arc::new( + Schema::parse_and_validate( + format!( + r#"{partial_sdl} + extend type Query {{ + _entities(representations: [_Any!]!): _Entity + }} + scalar _Any + union _Entity = Entity + "# + ), + "./", + ) + .unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Entity_field_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &subgraph_schema, + r#" + query($representations: [_Any!]!, $bye: String) { + _entities(representations: $representations) { + __typename + ... 
on Entity { + field(foo: "hi") { selected } + alias: field(foo: $bye) { selected } + } + } + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "representations": [ + { "__typename": "Entity", "id": "1" }, + { "__typename": "Entity", "id": "2" }, + ], + "bye": "bye" + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Entity), + name!(field), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("selected").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::entities_with_fields_from_request(&connector, &req).unwrap(), @r###" + [ + EntityField { + index: 0, + field_name: "field", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "hi", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + }, + }, + EntityField { + index: 1, + field_name: "field", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), 
+ range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "hi", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + }, + }, + EntityField { + index: 0, + field_name: "alias", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bye", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + }, + }, + EntityField { + index: 1, + field_name: "alias", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bye", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + }, + }, + ] + "###); + } + + #[test] + fn entities_with_fields_from_request_with_fragment() { + let partial_sdl = r#" + type Query { _: String } # just to make it valid + + type Entity { # @key(fields: "id") + id: ID! 
+ field(foo: String): T + } + + type T { + selected: String + } + "#; + + let subgraph_schema = Arc::new( + Schema::parse_and_validate( + format!( + r#"{partial_sdl} + extend type Query {{ + _entities(representations: [_Any!]!): _Entity + }} + scalar _Any + union _Entity = Entity + "# + ), + "./", + ) + .unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Entity_field_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &subgraph_schema, + r#" + query($representations: [_Any!]!, $bye: String) { + _entities(representations: $representations) { + ... _generated_Entity + } + } + fragment _generated_Entity on Entity { + __typename + field(foo: "hi") { selected } + alias: field(foo: $bye) { selected } + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "representations": [ + { "__typename": "Entity", "id": "1" }, + { "__typename": "Entity", "id": "2" }, + ], + "bye": "bye" + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Entity), + name!(field), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("selected").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + 
assert_debug_snapshot!(super::entities_with_fields_from_request(&connector, &req).unwrap(), @r###" + [ + EntityField { + index: 0, + field_name: "field", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "hi", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + }, + }, + EntityField { + index: 1, + field_name: "field", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "hi", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + }, + }, + EntityField { + index: 0, + field_name: "alias", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bye", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + }, + }, + EntityField { + index: 1, + field_name: "alias", + typename: Some( + "Entity", + ), + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bye", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + }, + }, + ] + "###); + } + + #[test] + fn 
entities_with_fields_from_request_interface_object() { + let partial_sdl = r#" + type Query { _: String } # just to make it valid + + type Entity { # @interfaceObject @key(fields: "id") + id: ID! + field(foo: String): T + } + + type T { + selected: String + } + "#; + + let subgraph_schema = Arc::new( + Schema::parse_and_validate( + format!( + r#"{partial_sdl} + extend type Query {{ + _entities(representations: [_Any!]!): _Entity + }} + scalar _Any + union _Entity = Entity + "# + ), + "./", + ) + .unwrap(), + ); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Entity_field_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &subgraph_schema, + r#" + query($representations: [_Any!]!, $foo: String) { + _entities(representations: $representations) { + ... on Entity { + field(foo: $foo) { selected } + } + } + } + "# + .to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: serde_json_bytes::json!({ + "representations": [ + { "__typename": "Entity", "id": "1" }, + { "__typename": "Entity", "id": "2" }, + ], + "foo": "bar" + }) + .as_object() + .unwrap() + .clone(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Entity), + name!(field), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("selected").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), 
+ response_variables: Default::default(), + }; + + assert_debug_snapshot!(super::entities_with_fields_from_request(&connector ,&req).unwrap(), @r###" + [ + EntityField { + index: 0, + field_name: "field", + typename: None, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bar", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "1", + ), + }, + }, + }, + EntityField { + index: 1, + field_name: "field", + typename: None, + selection: Named( + SubSelection { + selections: [ + Field( + None, + WithRange { + node: Field( + "selected", + ), + range: Some( + 0..8, + ), + }, + None, + ), + ], + range: Some( + 0..8, + ), + }, + ), + inputs: RequestInputs { + args: { + "foo": String( + "bar", + ), + }, + this: { + "__typename": String( + "Entity", + ), + "id": String( + "2", + ), + }, + }, + }, + ] + "###); + } + + #[test] + fn make_requests() { + let schema = Schema::parse_and_validate("type Query { hello: String }", "./").unwrap(); + + let req = crate::services::connect::Request::builder() + .service_name("subgraph_Query_a_0".into()) + .context(Context::default()) + .operation(Arc::new( + ExecutableDocument::parse_and_validate( + &schema, + "query { a: hello }".to_string(), + "./", + ) + .unwrap(), + )) + .variables(Variables { + variables: Default::default(), + inverted_paths: Default::default(), + contextual_arguments: Default::default(), + }) + .supergraph_request(Arc::new( + http::Request::builder() + .body(graphql::Request::builder().build()) + .unwrap(), + )) + .build(); + + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(users), + 0, + "test label", + ), + transport: HttpJsonTransport { + source_url: 
Some(Url::parse("http://localhost/api").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let requests: Vec<_> = super::make_requests(req, &connector, &None) + .unwrap() + .into_iter() + .map(|req| { + let (parts, _body) = req.request.into_parts(); + let new_req = + http::Request::from_parts(parts, http_body_util::Empty::::new()); + (new_req, req.key, req.debug_request) + }) + .collect(); + + assert_debug_snapshot!(requests, @r###" + [ + ( + Request { + method: GET, + uri: http://localhost/api/path, + version: HTTP/1.1, + headers: {}, + body: Empty, + }, + RootField { + name: "a", + selection: Path( + PathSelection { + path: WithRange { + node: Var( + WithRange { + node: $, + range: Some( + 0..1, + ), + }, + WithRange { + node: Key( + WithRange { + node: Field( + "data", + ), + range: Some( + 2..6, + ), + }, + WithRange { + node: Empty, + range: Some( + 6..6, + ), + }, + ), + range: Some( + 1..6, + ), + }, + ), + range: Some( + 0..6, + ), + }, + }, + ), + inputs: RequestInputs { + args: {}, + this: {}, + }, + }, + None, + ), + ] + "###); + } +} + +mod graphql_utils; diff --git a/apollo-router/src/plugins/connectors/make_requests/graphql_utils.rs b/apollo-router/src/plugins/connectors/make_requests/graphql_utils.rs new file mode 100644 index 0000000000..b955aad9a0 --- /dev/null +++ b/apollo-router/src/plugins/connectors/make_requests/graphql_utils.rs @@ -0,0 +1,149 @@ +use apollo_compiler::executable::Field; +use apollo_compiler::executable::Operation; +use apollo_compiler::executable::Selection; +use apollo_compiler::schema::Value; +use apollo_compiler::ExecutableDocument; +use apollo_compiler::Node; +use serde_json::Number; +use 
serde_json_bytes::ByteString; +use serde_json_bytes::Map; +use serde_json_bytes::Value as JSONValue; +use tower::BoxError; + +use super::MakeRequestError; +use super::ENTITIES; + +pub(super) fn field_arguments_map( + field: &Node, + variables: &Map, +) -> Result, BoxError> { + let mut arguments = Map::new(); + + for argument in field.arguments.iter() { + match &*argument.value { + apollo_compiler::schema::Value::Variable(name) => { + if let Some(value) = variables.get(name.as_str()) { + arguments.insert(argument.name.as_str(), value.clone()); + } + } + _ => { + arguments.insert( + argument.name.as_str(), + argument_value_to_json(&argument.value)?, + ); + } + } + } + + for argument_def in field.definition.arguments.iter() { + if let Some(value) = argument_def.default_value.as_ref() { + if !arguments.contains_key(argument_def.name.as_str()) { + arguments.insert( + argument_def.name.as_str(), + argument_value_to_json(value).map_err(|err| { + format!( + "failed to convert default value on {}({}:) to json: {}", + field.definition.name, argument_def.name, err + ) + })?, + ); + } + } + } + + Ok(arguments) +} + +pub(super) fn argument_value_to_json( + value: &apollo_compiler::ast::Value, +) -> Result { + match value { + Value::Null => Ok(JSONValue::Null), + Value::Enum(e) => Ok(JSONValue::String(e.as_str().into())), + Value::Variable(_) => Err(BoxError::from("variables not supported")), + Value::String(s) => Ok(JSONValue::String(s.as_str().into())), + Value::Float(f) => Ok(JSONValue::Number( + Number::from_f64( + f.try_to_f64() + .map_err(|_| BoxError::from("try_to_f64 failed"))?, + ) + .ok_or_else(|| BoxError::from("Number::from_f64 failed"))?, + )), + Value::Int(i) => Ok(JSONValue::Number(Number::from( + i.try_to_i32().map_err(|_| "invalid int")?, + ))), + Value::Boolean(b) => Ok(JSONValue::Bool(*b)), + Value::List(l) => Ok(JSONValue::Array( + l.iter() + .map(|v| argument_value_to_json(v)) + .collect::, _>>()?, + )), + Value::Object(o) => Ok(JSONValue::Object( + o.iter() 
+ .map(|(k, v)| argument_value_to_json(v).map(|v| (k.as_str().into(), v))) + .collect::, _>>()?, + )), + } +} + +/// Looks for _entities near the root of the operation. Also looks for +/// __typename within the _entities selection — if it was selected, then we +/// don't have a interfaceObject query. +pub(super) fn get_entity_fields<'a>( + document: &'a ExecutableDocument, + op: &'a Node, +) -> Result<(&'a Node, bool), MakeRequestError> { + use MakeRequestError::*; + + let root_field = op + .selection_set + .selections + .iter() + .find_map(|s| match s { + Selection::Field(f) if f.name == ENTITIES => Some(f), + _ => None, + }) + .ok_or_else(|| InvalidOperation("missing entities root field".into()))?; + + let mut typename_requested = false; + + for selection in root_field.selection_set.selections.iter() { + match selection { + Selection::Field(f) => { + if f.name == "__typename" { + typename_requested = true; + } + } + Selection::FragmentSpread(f) => { + let fragment = document + .fragments + .get(f.fragment_name.as_str()) + .ok_or_else(|| InvalidOperation("missing fragment".into()))?; + for selection in fragment.selection_set.selections.iter() { + match selection { + Selection::Field(f) => { + if f.name == "__typename" { + typename_requested = true; + } + } + Selection::FragmentSpread(_) | Selection::InlineFragment(_) => {} + } + } + } + Selection::InlineFragment(f) => { + for selection in f.selection_set.selections.iter() { + match selection { + Selection::Field(f) => { + if f.name == "__typename" { + typename_requested = true; + } + } + Selection::FragmentSpread(_) | Selection::InlineFragment(_) => {} + } + } + } + } + } + + Ok((root_field, typename_requested)) +} diff --git a/apollo-router/src/plugins/connectors/mod.rs b/apollo-router/src/plugins/connectors/mod.rs new file mode 100644 index 0000000000..f7b5a81cf3 --- /dev/null +++ b/apollo-router/src/plugins/connectors/mod.rs @@ -0,0 +1,14 @@ +pub(crate) mod configuration; +pub(crate) mod error; +mod 
form_encoding; +pub(crate) mod handle_responses; +pub(crate) mod http; +pub(crate) mod http_json_transport; +pub(crate) mod make_requests; +pub(crate) mod plugin; +pub(crate) mod query_plans; +pub(crate) mod request_limit; +pub(crate) mod tracing; + +#[cfg(test)] +pub(crate) mod tests; diff --git a/apollo-router/src/plugins/connectors/plugin.rs b/apollo-router/src/plugins/connectors/plugin.rs new file mode 100644 index 0000000000..1dcb0809d3 --- /dev/null +++ b/apollo-router/src/plugins/connectors/plugin.rs @@ -0,0 +1,184 @@ +pub(crate) mod debug; + +use std::sync::atomic::AtomicBool; +use std::sync::atomic::Ordering; +use std::sync::Arc; + +use debug::ConnectorContext; +use futures::StreamExt; +use http::HeaderValue; +use itertools::Itertools; +use parking_lot::Mutex; +use serde_json_bytes::json; +use tower::BoxError; +use tower::ServiceBuilder; +use tower::ServiceExt as TowerServiceExt; + +use super::query_plans::get_connectors; +use crate::layers::ServiceExt; +use crate::plugin::Plugin; +use crate::plugin::PluginInit; +use crate::plugins::connectors::configuration::ConnectorsConfig; +use crate::plugins::connectors::request_limit::RequestLimits; +use crate::register_plugin; +use crate::services::connector_service::ConnectorSourceRef; +use crate::services::execution; +use crate::services::supergraph; + +const CONNECTORS_DEBUG_HEADER_NAME: &str = "Apollo-Connectors-Debugging"; +const CONNECTORS_DEBUG_ENV: &str = "APOLLO_CONNECTORS_DEBUGGING"; +const CONNECTORS_DEBUG_KEY: &str = "apolloConnectorsDebugging"; +const CONNECTORS_MAX_REQUESTS_ENV: &str = "APOLLO_CONNECTORS_MAX_REQUESTS_PER_OPERATION"; +const CONNECTOR_SOURCES_IN_QUERY_PLAN: &str = "apollo_connectors::sources_in_query_plan"; + +static LAST_DEBUG_ENABLED_VALUE: AtomicBool = AtomicBool::new(false); + +#[derive(Debug, Clone)] +struct Connectors { + debug_extensions: bool, + max_requests: Option, + expose_sources_in_context: bool, +} + +#[async_trait::async_trait] +impl Plugin for Connectors { + type Config = 
ConnectorsConfig; + + async fn new(init: PluginInit) -> Result { + let debug_extensions = init.config.debug_extensions + || std::env::var(CONNECTORS_DEBUG_ENV).as_deref() == Ok("true"); + + let last_value = LAST_DEBUG_ENABLED_VALUE.load(Ordering::Relaxed); + let swap_result = LAST_DEBUG_ENABLED_VALUE.compare_exchange( + last_value, + debug_extensions, + Ordering::Relaxed, + Ordering::Relaxed, + ); + // Ok means we swapped value, inner value is old value. Ok(false) means we went false -> true + if matches!(swap_result, Ok(false)) { + tracing::warn!( + "Connector debugging is enabled, this may expose sensitive information." + ); + } + + let max_requests = init + .config + .max_requests_per_operation_per_source + .or(std::env::var(CONNECTORS_MAX_REQUESTS_ENV) + .ok() + .and_then(|v| v.parse().ok())); + + Ok(Connectors { + debug_extensions, + max_requests, + expose_sources_in_context: init.config.expose_sources_in_context, + }) + } + + fn supergraph_service(&self, service: supergraph::BoxService) -> supergraph::BoxService { + let conf_enabled = self.debug_extensions; + let max_requests = self.max_requests; + service + .map_future_with_request_data( + move |req: &supergraph::Request| { + let is_debug_enabled = conf_enabled + && req + .supergraph_request + .headers() + .get(CONNECTORS_DEBUG_HEADER_NAME) + == Some(&HeaderValue::from_static("true")); + + req.context.extensions().with_lock(|mut lock| { + lock.insert::>(Arc::new(RequestLimits::new( + max_requests, + ))); + if is_debug_enabled { + lock.insert::>>(Arc::new(Mutex::new( + ConnectorContext::default(), + ))); + } + }); + + is_debug_enabled + }, + move |is_debug_enabled: bool, f| async move { + let mut res: supergraph::ServiceResult = f.await; + + res = match res { + Ok(mut res) => { + res.context.extensions().with_lock(|mut lock| { + if let Some(limits) = lock.remove::>() { + limits.log(); + } + }); + if is_debug_enabled { + if let Some(debug) = res.context.extensions().with_lock(|lock| { + lock.get::>>().cloned() 
+ }) { + let (parts, stream) = res.response.into_parts(); + + let stream = stream.map(move |mut chunk| { + let serialized = { &debug.lock().clone().serialize() }; + chunk.extensions.insert( + CONNECTORS_DEBUG_KEY, + json!({"version": "1", "data": serialized }), + ); + chunk + }); + + res.response = + http::Response::from_parts(parts, Box::pin(stream)); + } + } + + Ok(res) + } + Err(err) => Err(err), + }; + + res + }, + ) + .boxed() + } + + fn execution_service(&self, service: execution::BoxService) -> execution::BoxService { + if !self.expose_sources_in_context { + return service; + } + + ServiceBuilder::new() + .map_request(|req: execution::Request| { + let Some(connectors) = get_connectors(&req.context) else { + return req; + }; + + // add [{"subgraph_name": "", "source_name": ""}] to the context + // for connectors with sources in the query plan. + let list = req + .query_plan + .root + .service_usage_set() + .into_iter() + .flat_map(|service_name| { + connectors + .get(service_name) + .map(|connector| ConnectorSourceRef::try_from(connector).ok()) + }) + .unique() + .collect_vec(); + + req.context + .insert(CONNECTOR_SOURCES_IN_QUERY_PLAN, list) + .unwrap(); + req + }) + .service(service) + .boxed() + } +} + +pub(crate) const PLUGIN_NAME: &str = "preview_connectors"; + +register_plugin!("apollo", PLUGIN_NAME, Connectors); diff --git a/apollo-router/src/plugins/connectors/plugin/debug.rs b/apollo-router/src/plugins/connectors/plugin/debug.rs new file mode 100644 index 0000000000..3109b30fba --- /dev/null +++ b/apollo-router/src/plugins/connectors/plugin/debug.rs @@ -0,0 +1,228 @@ +use std::collections::HashMap; + +use apollo_federation::sources::connect::ApplyToError; +use bytes::Bytes; +use itertools::Itertools; +use serde::Deserialize; +use serde::Serialize; +use serde_json_bytes::json; + +use crate::services::router::body::RouterBody; + +#[derive(Debug, Clone, Serialize, Deserialize, Default)] +pub(crate) struct ConnectorContext { + requests: Vec, + responses: 
Vec, +} + +impl ConnectorContext { + pub(crate) fn push_response( + &mut self, + request: Option, + parts: &http::response::Parts, + json_body: &serde_json_bytes::Value, + selection_data: Option, + ) { + if let Some(request) = request { + self.requests.push(request); + self.responses + .push(serialize_response(parts, json_body, selection_data)); + } else { + tracing::warn!( + "connectors debugging: couldn't find a matching request for the response" + ); + } + } + + pub(crate) fn push_invalid_response( + &mut self, + request: Option, + parts: &http::response::Parts, + body: &Bytes, + ) { + if let Some(request) = request { + self.requests.push(request); + self.responses.push(ConnectorDebugHttpResponse { + status: parts.status.as_u16(), + headers: parts + .headers + .iter() + .map(|(name, value)| { + ( + name.as_str().to_string(), + value.to_str().unwrap().to_string(), + ) + }) + .collect(), + body: ConnectorDebugBody { + kind: "invalid".to_string(), + content: format!("{:?}", body).into(), + selection: None, + }, + }); + } else { + tracing::warn!( + "connectors debugging: couldn't find a matching request for the response" + ); + } + } + + pub(super) fn serialize(self) -> serde_json_bytes::Value { + json!(self + .requests + .into_iter() + .zip(self.responses.into_iter()) + .map(|(req, res)| json!({ + "request": req, + "response": res, + })) + .collect::>()) + } +} + +/// JSONSelection Request / Response Data +/// +/// Contains all needed info and responses from the application of a JSONSelection +pub(crate) struct SelectionData { + /// The original [`JSONSelection`] to resolve + pub(crate) source: String, + + /// A mapping of the original selection, taking into account renames and other + /// transformations requested by the client + /// + /// Refer to [`Self::source`] for the original, schema-supplied selection. + pub(crate) transformed: String, + + /// The result of applying the selection to JSON. 
An empty value + /// here can potentially mean that errors were encountered. + /// + /// Refer to [`Self::errors`] for any errors found during evaluation + pub(crate) result: Option, + + /// A list of errors encountered during evaluation. + pub(crate) errors: Vec, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct ConnectorDebugBody { + kind: String, + content: serde_json_bytes::Value, + selection: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +pub(crate) struct ConnectorDebugHttpRequest { + url: String, + method: String, + headers: Vec<(String, String)>, + body: Option, +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct ConnectorDebugSelection { + source: String, + transformed: String, + result: Option, + errors: Vec, +} + +pub(crate) fn serialize_request( + req: &http::Request, + kind: String, + json_body: Option<&serde_json_bytes::Value>, + selection_data: Option, +) -> ConnectorDebugHttpRequest { + ConnectorDebugHttpRequest { + url: req.uri().to_string(), + method: req.method().to_string(), + headers: req + .headers() + .iter() + .map(|(name, value)| { + ( + name.as_str().to_string(), + value.to_str().unwrap().to_string(), + ) + }) + .collect(), + body: json_body.map(|body| ConnectorDebugBody { + kind, + content: body.clone(), + selection: selection_data.map(|selection| ConnectorDebugSelection { + source: selection.source, + transformed: selection.transformed, + result: selection.result, + errors: aggregate_apply_to_errors(&selection.errors), + }), + }), + } +} + +#[derive(Debug, Clone, Serialize, Deserialize)] +struct ConnectorDebugHttpResponse { + status: u16, + headers: Vec<(String, String)>, + body: ConnectorDebugBody, +} + +fn serialize_response( + parts: &http::response::Parts, + json_body: &serde_json_bytes::Value, + selection_data: Option, +) -> ConnectorDebugHttpResponse { + ConnectorDebugHttpResponse { + status: parts.status.as_u16(), + headers: parts + .headers + .iter() + .map(|(name, value)| { + ( + 
name.as_str().to_string(), + value.to_str().unwrap().to_string(), + ) + }) + .collect(), + body: ConnectorDebugBody { + kind: "json".to_string(), + content: json_body.clone(), + selection: selection_data.map(|selection| ConnectorDebugSelection { + source: selection.source, + transformed: selection.transformed, + result: selection.result, + errors: aggregate_apply_to_errors(&selection.errors), + }), + }, + } +} + +fn aggregate_apply_to_errors(errors: &[ApplyToError]) -> Vec { + errors + .iter() + .fold( + HashMap::default(), + |mut acc: HashMap<(&str, String), usize>, err| { + let path = err + .path() + .iter() + .map(|p| match p.as_u64() { + Some(_) => "@", // ignore array indices for grouping + None => p.as_str().unwrap_or_default(), + }) + .join("."); + + acc.entry((err.message(), path)) + .and_modify(|c| *c += 1) + .or_insert(1); + acc + }, + ) + .iter() + .map(|(key, count)| { + json!({ + "message": key.0, + "path": key.1, + "count": count, + }) + }) + .collect() +} diff --git a/apollo-router/src/plugins/connectors/query_plans.rs b/apollo-router/src/plugins/connectors/query_plans.rs new file mode 100644 index 0000000000..e8a7748d7b --- /dev/null +++ b/apollo-router/src/plugins/connectors/query_plans.rs @@ -0,0 +1,123 @@ +use std::sync::Arc; + +use apollo_federation::sources::connect::Connector; +use indexmap::IndexMap; + +use crate::query_planner::PlanNode; +use crate::Context; + +type ConnectorsByServiceName = Arc, Connector>>; + +pub(crate) fn store_connectors( + context: &Context, + connectors_by_service_name: Arc, Connector>>, +) { + context + .extensions() + .with_lock(|mut lock| lock.insert::(connectors_by_service_name)); +} + +pub(crate) fn get_connectors(context: &Context) -> Option { + context + .extensions() + .with_lock(|lock| lock.get::().cloned()) +} + +type ConnectorLabels = Arc, String>>; + +pub(crate) fn store_connectors_labels( + context: &Context, + labels_by_service_name: Arc, String>>, +) { + context + .extensions() + .with_lock(|mut lock| 
lock.insert::(labels_by_service_name)); +} + +pub(crate) fn replace_connector_service_names_text( + text: Option>, + context: &Context, +) -> Option> { + let replacements = context + .extensions() + .with_lock(|lock| lock.get::().cloned()); + if let Some(replacements) = replacements { + text.as_ref().map(|text| { + let mut text = text.to_string(); + for (service_name, label) in replacements.iter() { + text = text.replace(&**service_name, label); + } + Arc::new(text) + }) + } else { + text + } +} + +pub(crate) fn replace_connector_service_names( + plan: Arc, + context: &Context, +) -> Arc { + let replacements = context + .extensions() + .with_lock(|lock| lock.get::().cloned()); + + return if let Some(replacements) = replacements { + let mut plan = plan.clone(); + recurse(Arc::make_mut(&mut plan), &replacements); + plan + } else { + plan + }; + + fn recurse(plan: &mut PlanNode, replacements: &IndexMap, String>) { + match plan { + PlanNode::Sequence { nodes } => { + for node in nodes { + recurse(node, replacements); + } + } + PlanNode::Parallel { nodes } => { + for node in nodes { + recurse(node, replacements); + } + } + PlanNode::Fetch(node) => { + if let Some(service_name) = replacements.get(&node.service_name) { + node.service_name = service_name.clone().into(); + } + } + PlanNode::Flatten(flatten) => { + recurse(&mut flatten.node, replacements); + } + PlanNode::Defer { primary, deferred } => { + if let Some(primary) = primary.node.as_mut() { + recurse(primary, replacements); + } + for deferred in deferred { + if let Some(node) = &mut deferred.node { + recurse(Arc::make_mut(node), replacements); + } + } + } + PlanNode::Subscription { primary: _, rest } => { + // ignoring subscriptions because connectors are not supported + if let Some(node) = rest { + recurse(node, replacements); + } + } + PlanNode::Condition { + if_clause, + else_clause, + .. 
+ } => { + if let Some(if_clause) = if_clause.as_mut() { + recurse(if_clause, replacements); + } + if let Some(else_clause) = else_clause.as_mut() { + recurse(else_clause, replacements); + } + } + } + } +} diff --git a/apollo-router/src/plugins/connectors/request_limit.rs b/apollo-router/src/plugins/connectors/request_limit.rs new file mode 100644 index 0000000000..7f9fc1e975 --- /dev/null +++ b/apollo-router/src/plugins/connectors/request_limit.rs @@ -0,0 +1,114 @@ +//! Limits on Connectors requests + +use std::collections::HashMap; +use std::fmt::Display; +use std::fmt::Formatter; +use std::sync::atomic::AtomicUsize; +use std::sync::atomic::Ordering; +use std::sync::Arc; + +use apollo_federation::sources::connect::ConnectId; +use parking_lot::Mutex; + +/// Key to access request limits for a connector +#[derive(Eq, Hash, PartialEq)] +pub(crate) enum RequestLimitKey { + /// A key to access the request limit for a connector referencing a source directive + SourceName(String), + + /// A key to access the request limit for a connector without a corresponding source directive + ConnectorLabel(String), +} + +impl Display for RequestLimitKey { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + match self { + RequestLimitKey::SourceName(source_name) => { + write!(f, "connector source {}", source_name) + } + RequestLimitKey::ConnectorLabel(connector_label) => { + write!(f, "connector {}", connector_label) + } + } + } +} + +impl From<&ConnectId> for RequestLimitKey { + fn from(value: &ConnectId) -> Self { + value + .source_name + .as_ref() + .map(|source_name| RequestLimitKey::SourceName(source_name.clone())) + .unwrap_or(RequestLimitKey::ConnectorLabel(value.label.clone())) + } +} + +/// Tracks a request limit for a connector +pub(crate) struct RequestLimit { + max_requests: usize, + total_requests: AtomicUsize, +} + +impl RequestLimit { + pub(crate) fn new(max_requests: usize) -> Self { + Self { + max_requests, + total_requests: AtomicUsize::new(0), + } + } + 
+ pub(crate) fn allow(&self) -> bool { + self.total_requests.fetch_add(1, Ordering::Relaxed) < self.max_requests + } +} + +/// Tracks the request limits for an operation +pub(crate) struct RequestLimits { + default_max_requests: Option, + limits: Mutex>>, +} + +impl RequestLimits { + pub(crate) fn new(default_max_requests: Option) -> Self { + Self { + default_max_requests, + limits: Mutex::new(HashMap::new()), + } + } + + #[allow(clippy::unwrap_used)] // Unwrap checked by invariant + pub(crate) fn get( + &self, + key: RequestLimitKey, + limit: Option, + ) -> Option> { + if limit.is_none() && self.default_max_requests.is_none() { + return None; + } + Some( + self.limits + .lock() + .entry(key) + .or_insert_with(|| { + Arc::new(RequestLimit::new( + limit.or(self.default_max_requests).unwrap(), + )) + }) // unwrap ok, invariant checked above + .clone(), + ) + } + + pub(crate) fn log(&self) { + self.limits.lock().iter().for_each(|(key, limit)| { + let total = limit.total_requests.load(Ordering::Relaxed); + if total > limit.max_requests { + tracing::warn!( + "Request limit exceeded for {}: max: {}, attempted: {}", + key, + limit.max_requests, + total, + ); + } + }); + } +} diff --git a/apollo-router/src/plugins/connectors/testdata/README.md b/apollo-router/src/plugins/connectors/testdata/README.md new file mode 100644 index 0000000000..bd6b8e20f3 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/README.md @@ -0,0 +1,22 @@ +# Connectors runtime tests + +Each schema in this directory is used to test the runtime behavior of connectors in the sibling `test` directory. + +The runtime tests require an already composed "supergraph SDL", which is the output of `rover supergraph compose`. Each +schema is defined using a supergraph config `.yaml` file in this directory. + +## Regenerating + +The `regenerate.sh` script will convert each of these `.yaml` files into a composed `.graphql` file which can be +executed. 
+ +### Options: + +- Pass a specific `.yaml` file as an argument to regenerate only that file. +- Set the `FEDERATION_VERSION` environment variable to specify the federation version to use. + +> [!TIP] +> If you need to compose with an unreleased version of composition, you can add any `supergraph` binary to +> `~/.rover/bin` and use the suffix of that binary as a version. For example, if you have `supergraph-v2.10.0-blah` in +> that +> bin folder, you can set `FEDERATION_VERSION="2.10.0-blah"` to use that version. diff --git a/apollo-router/src/plugins/connectors/testdata/connector-without-source.graphql b/apollo-router/src/plugins/connectors/testdata/connector-without-source.graphql new file mode 100644 index 0000000000..b3001fde24 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/connector-without-source.graphql @@ -0,0 +1,68 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) 
repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {http: {GET: "http://localhost/users/{$args.id}"}, selection: "id name"}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! + name: String +} diff --git a/apollo-router/src/plugins/connectors/testdata/connector-without-source.yaml b/apollo-router/src/plugins/connectors/testdata/connector-without-source.yaml new file mode 100644 index 0000000000..b1354ed84d --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/connector-without-source.yaml @@ -0,0 +1,21 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + + type User { + id: ID! 
+ name: String + } + + type Query { + user(id: ID!): User + @connect(http: { GET: "http://localhost/users/{$$args.id}" }, selection: "id name") + } diff --git a/apollo-router/src/plugins/connectors/testdata/form-encoding.graphql b/apollo-router/src/plugins/connectors/testdata/form-encoding.graphql new file mode 100644 index 0000000000..f139fcf2b9 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/form-encoding.graphql @@ -0,0 +1,107 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "http://localhost", headers: [{name: "Content-Type", value: "application/x-www-form-urlencoded"}]}}) +{ + query: Query + mutation: Mutation +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! 
= false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Mutation + @join__type(graph: CONNECTORS) +{ + post(input: PostInput!): Post @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {POST: "/posts", body: "$args.input {\n int\n str\n bool\n enum\n id\n\n intArr\n strArr\n boolArr\n # enumArr\n idArr\n\n obj {\n a\n b\n c\n nested {\n d\n e\n f\n }\n }\n objArr {\n a\n b\n c\n nested {\n d\n e\n f\n }\n }\n}"}, selection: "id"}) +} + +type Post + @join__type(graph: CONNECTORS) +{ + id: ID +} + +input PostChildInput + @join__type(graph: CONNECTORS) +{ + a: Int + b: String + c: Boolean + nested: PostNestedInput +} + +input PostInput + @join__type(graph: CONNECTORS) +{ + int: Int + str: String + bool: Boolean + id: ID + intArr: [Int] + strArr: [String] + boolArr: [Boolean] + idArr: [ID] + obj: PostChildInput + objArr: [PostChildInput] +} + +input PostNestedInput + @join__type(graph: CONNECTORS) +{ + d: Int + e: String + f: Boolean +} + +type Query + @join__type(graph: CONNECTORS) +{ + hello: String @join__directive(graphs: [CONNECTORS], name: "connect", args: {http: {GET: "http://localhost/hello"}, selection: "$"}) +} diff --git 
a/apollo-router/src/plugins/connectors/testdata/form-encoding.yaml b/apollo-router/src/plugins/connectors/testdata/form-encoding.yaml new file mode 100644 index 0000000000..ccb0168d38 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/form-encoding.yaml @@ -0,0 +1,114 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.10" + import: ["@key", "@external", "@requires", "@shareable"] + ) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { + baseURL: "http://localhost" + headers: [ + { name: "Content-Type" value: "application/x-www-form-urlencoded" } + ] + } + ) + + type Query { + hello: String @connect(http: { GET: "http://localhost/hello" }, selection: "$") + } + + type Mutation { + post(input: PostInput!): Post + @connect( + source: "json" + http: { + POST: "/posts" + body: """ + $$args.input { + int + str + bool + enum + id + + intArr + strArr + boolArr + # enumArr + idArr + + obj { + a + b + c + nested { + d + e + f + } + } + objArr { + a + b + c + nested { + d + e + f + } + } + } + """ + } + selection: "id" + ) + } + + input PostInput { + int: Int + str: String + bool: Boolean + # enum: PostEnum + id: ID + + intArr: [Int] + strArr: [String] + boolArr: [Boolean] + # enumArr: [PostEnum] + idArr: [ID] + + obj: PostChildInput + objArr: [PostChildInput] + } + + input PostChildInput { + a: Int + b: String + c: Boolean + nested: PostNestedInput + } + + input PostNestedInput { + d: Int + e: String + f: Boolean + } + + # enum PostEnum { + # A + # B + # C + # } + + type Post { + id: ID + } \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/interface-object.graphql b/apollo-router/src/plugins/connectors/testdata/interface-object.graphql new file mode 100644 index 0000000000..a7364124c6 --- /dev/null +++ 
b/apollo-router/src/plugins/connectors/testdata/interface-object.graphql @@ -0,0 +1,97 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "http://localhost:4001"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +interface Itf + @join__type(graph: CONNECTORS, key: "id", isInterfaceObject: true) + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + c: Int! @join__field(graph: CONNECTORS) + d: Int! 
@join__field(graph: CONNECTORS) + e: String @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs/{$this.id}/e"}, selection: "$"}) +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) + @join__type(graph: GRAPHQL) +{ + itfs: [Itf] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs"}, selection: "id c"}) + itf(id: ID!): Itf @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/itfs/{$args.id}"}, selection: "id c d", entity: true}) +} + +type T1 implements Itf + @join__implements(graph: GRAPHQL, interface: "Itf") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + a: String + c: Int! @join__field + d: Int! @join__field + e: String @join__field +} + +type T2 implements Itf + @join__implements(graph: GRAPHQL, interface: "Itf") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! + b: String + c: Int! @join__field + d: Int! 
@join__field + e: String @join__field +} diff --git a/apollo-router/src/plugins/connectors/testdata/interface-object.yaml b/apollo-router/src/plugins/connectors/testdata/interface-object.yaml new file mode 100644 index 0000000000..54a17a093a --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/interface-object.yaml @@ -0,0 +1,58 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key", "@interfaceObject"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "json", http: { baseURL: "http://localhost:4001" }) + + type Query { + itfs: [Itf] + @connect( + source: "json" + http: { GET: "/itfs" } + selection: "id c" + ) + + itf(id: ID!): Itf + @connect( + source: "json" + http: { GET: "/itfs/{$$args.id}" } + selection: "id c d" + entity: true + ) + } + + type Itf @key(fields: "id") @interfaceObject { + id: ID! + c: Int! + d: Int! + e: String + @connect( + source: "json" + http: { GET: "/itfs/{$$this.id}/e" } + selection: "$" + ) + } + graphql: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + + interface Itf @key(fields: "id") { + id: ID! + } + + type T1 implements Itf @key(fields: "id") { + id: ID! + a: String + } + + type T2 implements Itf @key(fields: "id") { + id: ID! 
+ b: String + } diff --git a/apollo-router/src/plugins/connectors/testdata/mutation.graphql b/apollo-router/src/plugins/connectors/testdata/mutation.graphql new file mode 100644 index 0000000000..0bc0e39265 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/mutation.graphql @@ -0,0 +1,85 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query + mutation: Mutation +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type CreateUserPayload + @join__type(graph: CONNECTORS) +{ + success: Boolean! + user: User! +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Mutation + @join__type(graph: CONNECTORS) +{ + createUser(name: String!): CreateUserPayload! @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {POST: "/user", body: "username: $args.name"}, selection: "success: $(true)\nuser: {\n id\n name: username\n}"}) +} + +type Query + @join__type(graph: CONNECTORS) +{ + users: [User] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users"}, selection: "id name"}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$args.id}"}, selection: "id name email", entity: true}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! 
+ name: String + email: String +} diff --git a/apollo-router/src/plugins/connectors/testdata/mutation.yaml b/apollo-router/src/plugins/connectors/testdata/mutation.yaml new file mode 100644 index 0000000000..ec77b1a698 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/mutation.yaml @@ -0,0 +1,60 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + } + ) + + type User { + id: ID! + name: String + email: String + } + + type Query { + users: [User] + @connect(source: "json", http: { GET: "/users" }, selection: "id name") + user(id: ID!): User + @connect( + source: "json" + http: { GET: "/users/{$$args.id}" } + selection: "id name email" + entity: true + ) + } + + type Mutation { + createUser(name: String!): CreateUserPayload! + @connect( + source: "json" + http: { + POST: "/user" + body: """ + username: $$args.name + """ + } + selection: """ + success: $(true) + user: { + id + name: username + } + """ + ) + } + + type CreateUserPayload { + success: Boolean! + user: User! 
+ } diff --git a/apollo-router/src/plugins/connectors/testdata/nullability.graphql b/apollo-router/src/plugins/connectors/testdata/nullability.graphql new file mode 100644 index 0000000000..b70e9ffbb2 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/nullability.graphql @@ -0,0 +1,87 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type Address + @join__type(graph: CONNECTORS) +{ + street: String + zip: String +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Pet + @join__type(graph: CONNECTORS) +{ + name: String + species: String +} + +type Query + @join__type(graph: CONNECTORS) +{ + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$args.id}"}, selection: "id\nname\noccupation: job\naddress {\n street\n zip\n}\npet {\n name\n species\n}", entity: true}) + defaultArgs(str: String = "default", int: Int = 42, float: Float = 1.23, bool: Boolean = true, arr: [String] = ["default"]): String @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {POST: "/default-args", body: "str: $args.str int: $args.int float: $args.float bool: $args.bool arr: $args.arr"}, selection: "$"}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! 
+ name: String + occupation: String + address: Address + pet: Pet +} diff --git a/apollo-router/src/plugins/connectors/testdata/nullability.yaml b/apollo-router/src/plugins/connectors/testdata/nullability.yaml new file mode 100644 index 0000000000..1fab24d5cc --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/nullability.yaml @@ -0,0 +1,71 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + } + ) + + type User { + id: ID! + name: String + occupation: String + address: Address + pet: Pet + } + + type Address { + street: String + zip: String + } + + type Pet { + name: String + species: String + } + + type Query { + user(id: ID!): User + @connect( + source: "json" + http: { GET: "/users/{$$args.id}" } + selection: """ + id + name + occupation: job + address { + street + zip + } + pet { + name + species + } + """ + entity: true + ) + + defaultArgs( + str: String = "default" + int: Int = 42 + float: Float = 1.23 + bool: Boolean = true + # TODO: input enums will be supported after 2.10.0-alpha.2 + arr: [String] = ["default"] + ): String + @connect( + source: "json" + http: { POST: "/default-args", body: "str: $$args.str int: $$args.int float: $$args.float bool: $$args.bool arr: $$args.arr" } + selection: "$" + ) + } diff --git a/apollo-router/src/plugins/connectors/testdata/quickstart.graphql b/apollo-router/src/plugins/connectors/testdata/quickstart.graphql new file mode 100644 index 0000000000..11cf26c992 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart.graphql @@ -0,0 +1,82 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: 
EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "jsonPlaceholder", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Post + @join__type(graph: CONNECTORS) +{ + id: ID! + body: String + title: String + author: User +} + +type Query + @join__type(graph: CONNECTORS) +{ + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts"}, selection: "id\ntitle\nbody\nauthor: { id: userId }"}) + post(id: ID!): Post @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts/{$args.id}"}, selection: "id\ntitle\nbody\nauthor: { id: userId }", entity: true}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$args.id}"}, selection: "id\nname\nusername", entity: true}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! + name: String + username: String + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$this.id}/posts"}, selection: "id\ntitle\nbody"}) +} diff --git a/apollo-router/src/plugins/connectors/testdata/quickstart.yaml b/apollo-router/src/plugins/connectors/testdata/quickstart.yaml new file mode 100644 index 0000000000..856705fe9e --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart.yaml @@ -0,0 +1,75 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.8") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "jsonPlaceholder" + http: { baseURL: "https://jsonplaceholder.typicode.com/" } + ) + + type Post { + id: ID! + body: String + title: String + author: User + } + + type User { + id: ID! 
+ name: String + username: String + posts: [Post] + @connect( + source: "jsonPlaceholder" + http: { GET: "/users/{$$this.id}/posts" } + selection: """ + id + title + body + """ + ) + } + + type Query { + posts: [Post] + @connect( + source: "jsonPlaceholder" + http: { GET: "/posts" } + selection: """ + id + title + body + author: { id: userId } + """ + ) + post(id: ID!): Post + @connect( + source: "jsonPlaceholder" + http: { GET: "/posts/{$$args.id}" } + selection: """ + id + title + body + author: { id: userId } + """ + entity: true + ) + user(id: ID!): User + @connect( + source: "jsonPlaceholder" + http: { GET: "/users/{$$args.id}" } + selection: """ + id + name + username + """ + entity: true + ) + } \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_1.json b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_1.json new file mode 100644 index 0000000000..5faa5ca17b --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_1.json @@ -0,0 +1,31 @@ +[ + { + "request": { + "method": "GET", + "path": "posts", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": [ + { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + }, + { + "userId": 1, + "id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + } + ] + } + } +] \ No newline at end of file diff --git 
a/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_2.json b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_2.json new file mode 100644 index 0000000000..f0a408eb2d --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_2.json @@ -0,0 +1,23 @@ +[ + { + "request": { + "method": "GET", + "path": "posts/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + } +] \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_3.json b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_3.json new file mode 100644 index 0000000000..2373dbdc5b --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_3.json @@ -0,0 +1,61 @@ +[ + { + "request": { + "method": "GET", + "path": "posts/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + }, + { + "request": { + "method": "GET", + "path": "users/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "id": 1, + "name": "Leanne Graham", + 
"username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { + "lat": "-37.3159", + "lng": "81.1496" + } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + } + } + } +] \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_4.json b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_4.json new file mode 100644 index 0000000000..b26761e481 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/quickstart_api_snapshots/query_4.json @@ -0,0 +1,279 @@ +[ + { + "request": { + "method": "GET", + "path": "posts/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/10", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 10, + "title": "optio molestias id quia eum", + "body": "quo et expedita modi cum officia vel magni\ndoloribus qui repudiandae\nvero nisi sit\nquos veniam quod sed accusamus veritatis error" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/2", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + 
"id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/3", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 3, + "title": "ea molestias quasi exercitationem repellat qui ipsa sit aut", + "body": "et iusto sed quo iure\nvoluptatem occaecati omnis eligendi aut ad\nvoluptatem doloribus vel accusantium quis pariatur\nmolestiae porro eius odio et labore et velit aut" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/4", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 4, + "title": "eum et est occaecati", + "body": "ullam et saepe reiciendis voluptatem adipisci\nsit amet autem assumenda provident rerum culpa\nquis hic commodi nesciunt rem tenetur doloremque ipsam iure\nquis sunt voluptatem rerum illo velit" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/5", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 5, + "title": "nesciunt quas odio", + "body": "repudiandae veniam quaerat sunt sed\nalias aut fugiat sit autem sed est\nvoluptatem omnis possimus esse voluptatibus quis\nest aut tenetur dolor neque" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/6", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 6, + "title": "dolorem eum magni eos aperiam quia", + "body": "ut aspernatur 
corporis harum nihil quis provident sequi\nmollitia nobis aliquid molestiae\nperspiciatis et ea nemo ab reprehenderit accusantium quas\nvoluptate dolores velit et doloremque molestiae" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/7", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 7, + "title": "magnam facilis autem", + "body": "dolore placeat quibusdam ea quo vitae\nmagni quis enim qui quis quo nemo aut saepe\nquidem repellat excepturi ut quia\nsunt ut sequi eos ea sed quas" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/8", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 8, + "title": "dolorem dolore est ipsam", + "body": "dignissimos aperiam dolorem qui eum\nfacilis quibusdam animi sint suscipit qui sint possimus cum\nquaerat magni maiores excepturi\nipsam ut commodi dolor voluptatum modi aut vitae" + } + } + }, + { + "request": { + "method": "GET", + "path": "posts/9", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 9, + "title": "nesciunt iure omnis dolorem tempora et accusantium", + "body": "consectetur animi nesciunt iure dolore\nenim quia ad\nveniam autem ut quam aut nobis\net est aut quod aut provident voluptas autem voluptas" + } + } + }, + { + "request": { + "method": "GET", + "path": "users/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 
556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { + "lat": "-37.3159", + "lng": "81.1496" + } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + } + } + }, + { + "request": { + "method": "GET", + "path": "users/1/posts", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": [ + { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + }, + { + "userId": 1, + "id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + } + ] + } + } +] \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/regenerate.sh b/apollo-router/src/plugins/connectors/testdata/regenerate.sh new file mode 100755 index 0000000000..48f004e4d6 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/regenerate.sh @@ -0,0 +1,23 @@ +set -euo pipefail + +if [ -z "${FEDERATION_VERSION:-}" ]; then + FEDERATION_VERSION="2.10.0-preview.2" +fi + +regenerate_graphql() { + local supergraph_config=$1 + local test_name + test_name=$(basename "$supergraph_config" .yaml) + local dir_name + dir_name=$(dirname "$supergraph_config") + echo "Regenerating $dir_name/$test_name.graphql" + rover supergraph compose --federation-version "=$FEDERATION_VERSION" --config "$supergraph_config" > "$dir_name/$test_name.graphql" +} + +if [ -z "${1:-}" ]; then + for supergraph_config in 
**/*.yaml; do + regenerate_graphql "$supergraph_config" + done +else + regenerate_graphql "$1" +fi \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/testdata/selection.graphql b/apollo-router/src/plugins/connectors/testdata/selection.graphql new file mode 100644 index 0000000000..d165d5ec9b --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/selection.graphql @@ -0,0 +1,83 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type Commit + @join__type(graph: CONNECTORS) +{ + commit: CommitDetail +} + +type CommitAuthor + @join__type(graph: CONNECTORS) +{ + name: String + email: String + owner: String +} + +type CommitDetail + @join__type(graph: CONNECTORS) +{ + name_from_path: String + by: CommitAuthor +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + commits(owner: String!, repo: String!): [Commit] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/repos/{$args.owner}/{$args.repo}/commits"}, selection: "commit {\n name_from_path: author.name\n by: {\n name: author.name\n email: author.email\n owner: $args.owner\n }\n}"}) +} diff --git a/apollo-router/src/plugins/connectors/testdata/selection.yaml b/apollo-router/src/plugins/connectors/testdata/selection.yaml new file mode 100644 index 0000000000..7e4a680679 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/selection.yaml @@ -0,0 +1,50 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.7") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + } + ) + + type Commit { + commit: CommitDetail + } + + type 
CommitDetail { + name_from_path: String + by: CommitAuthor + } + + type CommitAuthor { + name: String + email: String + owner: String + } + + type Query { + commits(owner: String!, repo: String!): [Commit] + @connect( + source: "json" + http: { GET: "/repos/{$$args.owner}/{$$args.repo}/commits" } + selection: """ + commit { + name_from_path: author.name + by: { + name: author.name + email: author.email + owner: $$args.owner + } + } + """ + ) + } diff --git a/apollo-router/src/plugins/connectors/testdata/steelthread.graphql b/apollo-router/src/plugins/connectors/testdata/steelthread.graphql new file mode 100644 index 0000000000..3cb4212f0d --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/steelthread.graphql @@ -0,0 +1,90 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "json", http: {baseURL: "https://jsonplaceholder.typicode.com/", headers: [{name: "x-new-name", from: "x-rename-source"}, {name: "x-forward", from: "x-forward"}, {name: "x-insert", value: "inserted"}, {name: "x-config-variable-source", value: "before {$config.source.val} after"}, {name: "x-context-value-source", value: "before {$context.val} after"}]}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://localhost:4001") +} + +scalar JSON + @join__type(graph: CONNECTORS) + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Post + @join__type(graph: CONNECTORS) +{ + id: ID! 
+ title: String + user: User +} + +type Query + @join__type(graph: CONNECTORS) + @join__type(graph: GRAPHQL) +{ + users: [User] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users", headers: [{name: "x-new-name", from: "x-rename-connect"}, {name: "x-insert-multi-value", value: "first,second"}, {name: "x-config-variable-connect", value: "before {$config.connect.val} after"}, {name: "x-context-value-connect", value: "before {$context.val} after"}]}, selection: "id name"}) + me: User @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$config.id}"}, selection: "id\nname\nusername"}) + user(id: ID!): User @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$args.id}", headers: [{name: "x-from-args", value: "before {$args.id} after"}]}, selection: "id\nname\nusername", entity: true}) + posts: [Post] @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/posts"}, selection: "id title user: { id: userId }"}) +} + +type User + @join__type(graph: CONNECTORS, key: "id") + @join__type(graph: GRAPHQL, key: "id") +{ + id: ID! 
+ name: String @join__field(graph: CONNECTORS) + username: String @join__field(graph: CONNECTORS) + nickname: String @join__field(graph: CONNECTORS) @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$this.id}/nicknames", headers: [{name: "x-from-this", value: "before {$this.id} after"}]}, selection: "$.first"}) + c: String @join__field(graph: CONNECTORS, external: true) @join__field(graph: GRAPHQL) + d: String @join__field(graph: CONNECTORS, requires: "c") @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "json", http: {GET: "/users/{$this.c}"}, selection: "$.phone"}) +} diff --git a/apollo-router/src/plugins/connectors/testdata/steelthread.yaml b/apollo-router/src/plugins/connectors/testdata/steelthread.yaml new file mode 100644 index 0000000000..1da18ef5b3 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/steelthread.yaml @@ -0,0 +1,126 @@ +subgraphs: + connectors: + routing_url: none + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.10" + import: ["@key", "@external", "@requires", "@shareable"] + ) + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "json" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + headers: [ + { name: "x-new-name" from: "x-rename-source" } + { name: "x-forward" from: "x-forward" } + { name: "x-insert" value: "inserted" } + { name: "x-config-variable-source" value: "before {$$config.source.val} after" } + { name: "x-context-value-source", value: "before {$$context.val} after" } + ] + } + ) + + type Query { + users: [User] + @connect( + source: "json" + http: { + GET: "/users" + headers: [ + {name: "x-new-name", from: "x-rename-connect"} + {name: "x-insert-multi-value", value: "first,second"} + {name: "x-config-variable-connect" value: "before {$$config.connect.val} after"} + {name: "x-context-value-connect", value: "before {$$context.val} 
after"} + ] + } + selection: "id name" + ) + + me: User @connect( + source: "json" + http: { GET: "/users/{$$config.id}" } + selection: """ + id + name + username + """ + ) + + user(id: ID!): User + @connect( + source: "json" + http: { + GET: "/users/{$$args.id}" + headers: [ + {name: "x-from-args" value: "before {$$args.id} after"} + ] + } + selection: """ + id + name + username + """ + entity: true + ) + + posts: [Post] + @connect( + source: "json" + http: { GET: "/posts" } + selection: "id title user: { id: userId }" + ) + } + + type User @key(fields: "id") { + id: ID! + name: String + username: String + nickname: String + @connect( + source: "json" + http: { + GET: "/users/{$$this.id}/nicknames" + headers: [ + {name: "x-from-this" value: "before {$$this.id} after"} + ] + } + selection: "$.first" + ) + c: String @external + d: String + @requires(fields: "c") + @connect( + source: "json" + http: { GET: "/users/{$$this.c}" } + selection: "$.phone" + ) + } + + type Post { + id: ID! + title: String + user: User + } + + scalar JSON + + graphql: + routing_url: https://localhost:4001 + schema: + sdl: | + extend schema + @link( + url: "https://specs.apollo.dev/federation/v2.7" + import: ["@key"] + ) + + type User @key(fields: "id") { + id: ID! 
+ c: String + } diff --git a/apollo-router/src/plugins/connectors/testdata/variables-subgraph.graphql b/apollo-router/src/plugins/connectors/testdata/variables-subgraph.graphql new file mode 100644 index 0000000000..f0745e80bb --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/variables-subgraph.graphql @@ -0,0 +1,89 @@ +extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10") + @link( + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + ) + @source( + name: "v1" + http: { + baseURL: "http://localhost:4001/" + headers: [ + { name: "x-source-context", value: "{$context.value}" } + { name: "x-source-config", value: "{$config.value}" } + ] + } + ) + +type Query { + f(arg: String!): T + @connect( + source: "v1" + http: { + POST: "/f?arg={$args.arg->slice(1)}&context={$context.value}&config={$config.value}" + headers: [ + { name: "x-connect-context", value: "{$context.value}" } + { name: "x-connect-config", value: "{$config.value}" } + { name: "x-connect-arg", value: "{$args.arg->last}" } + ] + body: """ + arg: $args.arg + context: $context.value + config: $config.value + """ + } + selection: """ + arg: $args.arg + context: $context.value + config: $config.value + status: $status + sibling: $("D") + extra: $->echo({ arg: $args.arg, context: $context.value, config: $config.value, status: $status }) + """ + ) +} + +type T { + arg: String + context: String + config: String + status: Int + sibling: String + extra: JSON + f(arg: String): U + @connect( + source: "v1" + http: { + POST: "/f?arg={$args.arg->slice(2)}&context={$context.value}&config={$config.value}&sibling={$this.sibling}" + headers: [ + { name: "x-connect-context", value: "{$context.value}" } + { name: "x-connect-config", value: "{$config.value}" } + { name: "x-connect-arg", value: "{$args.arg->first}" } + { name: "x-connect-sibling", value: "{$this.sibling}" } + ] + body: """ + arg: $args.arg + context: $context.value + config: $config.value + sibling: 
$this.sibling + """ + } + selection: """ + arg: $args.arg + context: $context.value + config: $config.value + sibling: $this.sibling + status: $status + """ + ) +} + +type U { + arg: String + context: String + config: String + status: Int + sibling: String +} + +scalar JSON diff --git a/apollo-router/src/plugins/connectors/testdata/variables.graphql b/apollo-router/src/plugins/connectors/testdata/variables.graphql new file mode 100644 index 0000000000..eba9f46791 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/variables.graphql @@ -0,0 +1,87 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "v1", http: {baseURL: "http://localhost:4001/", headers: [{name: "x-source-context", value: "{$context.value}"}, {name: "x-source-config", value: "{$config.value}"}]}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! 
= true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar JSON + @join__type(graph: CONNECTORS) + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query + @join__type(graph: CONNECTORS) +{ + f(arg: String!): T @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "v1", http: {POST: "/f?arg={$args.arg->slice(1)}&context={$context.value}&config={$config.value}", headers: [{name: "x-connect-context", value: "{$context.value}"}, {name: "x-connect-config", value: "{$config.value}"}, {name: "x-connect-arg", value: "{$args.arg->last}"}], body: "arg: $args.arg\ncontext: $context.value\nconfig: $config.value"}, selection: "arg: $args.arg\ncontext: $context.value\nconfig: $config.value\nstatus: $status\nsibling: $(\"D\")\nextra: $->echo({ arg: $args.arg, context: $context.value, config: $config.value, status: $status })"}) +} + +type T + @join__type(graph: CONNECTORS) +{ + arg: String + context: String + config: String + status: Int + sibling: String + extra: JSON + f(arg: String): U @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "v1", http: {POST: "/f?arg={$args.arg->slice(2)}&context={$context.value}&config={$config.value}&sibling={$this.sibling}", headers: [{name: 
"x-connect-context", value: "{$context.value}"}, {name: "x-connect-config", value: "{$config.value}"}, {name: "x-connect-arg", value: "{$args.arg->first}"}, {name: "x-connect-sibling", value: "{$this.sibling}"}], body: "arg: $args.arg\ncontext: $context.value\nconfig: $config.value\nsibling: $this.sibling"}, selection: "arg: $args.arg\ncontext: $context.value\nconfig: $config.value\nsibling: $this.sibling\nstatus: $status"}) +} + +type U + @join__type(graph: CONNECTORS) +{ + arg: String + context: String + config: String + status: Int + sibling: String +} diff --git a/apollo-router/src/plugins/connectors/testdata/variables.yaml b/apollo-router/src/plugins/connectors/testdata/variables.yaml new file mode 100644 index 0000000000..c186dfe709 --- /dev/null +++ b/apollo-router/src/plugins/connectors/testdata/variables.yaml @@ -0,0 +1,6 @@ +federation_version: =2.10.0-dylan +subgraphs: + connectors: + routing_url: none + schema: + file: variables-subgraph.graphql \ No newline at end of file diff --git a/apollo-router/src/plugins/connectors/tests/mock_api.rs b/apollo-router/src/plugins/connectors/tests/mock_api.rs new file mode 100644 index 0000000000..167d130413 --- /dev/null +++ b/apollo-router/src/plugins/connectors/tests/mock_api.rs @@ -0,0 +1,150 @@ +struct PathTemplate(String); + +impl wiremock::Match for PathTemplate { + fn matches(&self, request: &wiremock::Request) -> bool { + let path = request.url.path(); + let path = path.split('/'); + let template = self.0.split('/'); + + for pair in path.zip_longest(template) { + match pair { + EitherOrBoth::Both(p, t) => { + if t.starts_with('{') && t.ends_with('}') { + continue; + } + + if p != t { + return false; + } + } + _ => return false, + } + } + true + } +} + +#[allow(dead_code)] +fn path_template(template: &str) -> PathTemplate { + PathTemplate(template.to_string()) +} + +use super::*; + +pub(crate) fn users() -> Mock { + Mock::given(method("GET")).and(path("/users")).respond_with( + 
ResponseTemplate::new(200).set_body_json(serde_json::json!([ + { + "id": 1, + "name": "Leanne Graham" + }, + { + "id": 2, + "name": "Ervin Howell", + } + ])), + ) +} + +pub(crate) fn user_2_nicknames() -> Mock { + Mock::given(method("GET")) + .and(path("/users/2/nicknames")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!(["cat"]))) +} + +pub(crate) fn users_error() -> Mock { + Mock::given(method("GET")).and(path("/users")).respond_with( + ResponseTemplate::new(404).set_body_json(serde_json::json!([ + { + "kind": "json", + "content": {}, + "selection": null + } + ])), + ) +} + +pub(crate) fn user_1() -> Mock { + Mock::given(method("GET")) + .and(path("/users/1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "phone": "1-770-736-8031 x56442", + "email": "Sincere@april.biz", + "website": "hildegard.org" + }))) +} + +pub(crate) fn user_2() -> Mock { + Mock::given(method("GET")) + .and(path("/users/2")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": 2, + "name": "Ervin Howell", + "username": "Antonette", + "phone": "1-770-736-8031 x56442", + "email": "Shanna@melissa.tv", + "website": "anastasia.net" + }))) +} + +pub(crate) fn create_user() -> Mock { + Mock::given(method("POST")).and(path("/user")).respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!( + { + "id": 3, + "username": "New User" + } + )), + ) +} + +pub(crate) fn user_1_with_pet() -> Mock { + Mock::given(method("GET")) + .and(path("/users/1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": 1, + "name": "Leanne Graham", + "pet": { + "name": "Spot" + } + }))) +} + +pub(crate) fn commits() -> Mock { + Mock::given(method("GET")) + .and(path("/repos/foo/bar/commits")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!( + [ + { + "sha": "abcdef", + "commit": { + "author": { + 
"name": "Foo Bar", + "email": "noone@nowhere", + "date": "2024-07-09T01:22:33Z" + }, + "message": "commit message", + }, + }] + ))) +} + +pub(crate) fn posts() -> Mock { + Mock::given(method("GET")).and(path("/posts")).respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!([ + { + "id": 1, + "title": "Post 1", + "userId": 1 + }, + { + "id": 2, + "title": "Post 2", + "userId": 2 + } + ])), + ) +} diff --git a/apollo-router/src/plugins/connectors/tests/mod.rs b/apollo-router/src/plugins/connectors/tests/mod.rs new file mode 100644 index 0000000000..a6fa81ed8c --- /dev/null +++ b/apollo-router/src/plugins/connectors/tests/mod.rs @@ -0,0 +1,1814 @@ +use std::str::FromStr; +use std::sync::Arc; + +use apollo_compiler::response::JsonMap; +use http::header::CONTENT_TYPE; +use itertools::EitherOrBoth; +use itertools::Itertools; +use mime::APPLICATION_JSON; +use mockall::mock; +use mockall::predicate::eq; +use req_asserts::Matcher; +use serde_json::Value; +use serde_json_bytes::json; +use tower::ServiceExt; +use tracing_core::span::Attributes; +use tracing_core::span::Id; +use tracing_core::span::Record; +use tracing_core::Event; +use tracing_core::Metadata; +use wiremock::http::HeaderName; +use wiremock::http::HeaderValue; +use wiremock::matchers::body_json; +use wiremock::matchers::method; +use wiremock::matchers::path; +use wiremock::Mock; +use wiremock::MockServer; +use wiremock::ResponseTemplate; + +use crate::json_ext::ValueExt; +use crate::metrics::FutureMetricsExt; +use crate::plugins::telemetry::consts::CONNECT_SPAN_NAME; +use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; +use crate::router_factory::RouterSuperServiceFactory; +use crate::router_factory::YamlRouterFactory; +use crate::services::new_service::ServiceFactory; +use crate::services::router::Request; +use crate::services::supergraph; +use crate::Configuration; + +mod mock_api; +mod quickstart; +#[allow(dead_code)] +mod req_asserts; + +const STEEL_THREAD_SCHEMA: &str = 
include_str!("../testdata/steelthread.graphql"); +const MUTATION_SCHEMA: &str = include_str!("../testdata/mutation.graphql"); +const NULLABILITY_SCHEMA: &str = include_str!("../testdata/nullability.graphql"); +const SELECTION_SCHEMA: &str = include_str!("../testdata/selection.graphql"); +const NO_SOURCES_SCHEMA: &str = include_str!("../testdata/connector-without-source.graphql"); +const QUICKSTART_SCHEMA: &str = include_str!("../testdata/quickstart.graphql"); +const INTERFACE_OBJECT_SCHEMA: &str = include_str!("../testdata/interface-object.graphql"); +const VARIABLES_SCHEMA: &str = include_str!("../testdata/variables.graphql"); + +#[tokio::test] +async fn value_from_config() { + let mock_server = MockServer::start().await; + mock_api::user_1().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { me { id name username} }", + Default::default(), + Some(json!({ + "preview_connectors": { + "subgraphs": { + "connectors": { + "$config": { + "id": 1, + } + } + } + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "me": { + "id": 1, + "name": "Leanne Graham", + "username": "Bret" + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/users/1")], + ); +} + +#[tokio::test] +async fn max_requests() { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + mock_api::user_1().mount(&mock_server).await; + mock_api::user_2().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id name username } }", + Default::default(), + Some(json!({ + "preview_connectors": { + "max_requests_per_operation_per_source": 2 + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": [ + { + "id": 1, + "name": "Leanne Graham", + "username": "Bret" + }, 
+ { + "id": 2, + "name": "Ervin Howell", + "username": null + } + ] + }, + "errors": [ + { + "message": "Request limit exceeded", + "path": [ + "users", + 1 + ], + "extensions": { + "service": "connectors", + "connector": { + "coordinate": "connectors:Query.user@connect[0]" + }, + "code": "REQUEST_LIMIT_EXCEEDED" + } + } + ] + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users"), + Matcher::new().method("GET").path("/users/1"), + ], + ); +} + +#[tokio::test] +async fn source_max_requests() { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + mock_api::user_1().mount(&mock_server).await; + mock_api::user_2().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id name username } }", + Default::default(), + Some(json!({ + "preview_connectors": { + "subgraphs": { + "connectors": { + "sources": { + "json": { + "max_requests_per_operation": 2, + } + } + } + } + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": [ + { + "id": 1, + "name": "Leanne Graham", + "username": "Bret" + }, + { + "id": 2, + "name": "Ervin Howell", + "username": null + } + ] + }, + "errors": [ + { + "message": "Request limit exceeded", + "path": [ + "users", + 1 + ], + "extensions": { + "service": "connectors", + "connector": { + "coordinate": "connectors:Query.user@connect[0]" + }, + "code": "REQUEST_LIMIT_EXCEEDED" + } + } + ] + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users"), + Matcher::new().method("GET").path("/users/1"), + ], + ); +} + +#[tokio::test] +async fn test_root_field_plus_entity() { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + mock_api::user_1().mount(&mock_server).await; + 
mock_api::user_2().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { __typename id name username } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": [ + { + "__typename": "User", + "id": 1, + "name": "Leanne Graham", + "username": "Bret" + }, + { + "__typename": "User", + "id": 2, + "name": "Ervin Howell", + "username": "Antonette" + } + ] + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users"), + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/2"), + ], + ); +} + +#[tokio::test] +async fn test_root_field_plus_entity_plus_requires() { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + mock_api::user_1().mount(&mock_server).await; + mock_api::user_2().mount(&mock_server).await; + Mock::given(method("POST")) + .and(path("/graphql")) + .and(body_json(json!({ + "query": "query($representations: [_Any!]!) { _entities(representations: $representations) { ... 
on User { c } } }", + "variables": {"representations":[{"__typename":"User","id":1},{"__typename":"User","id":2}]} + }))) + .respond_with( + ResponseTemplate::new(200) + .insert_header(wiremock::http::HeaderName::from_string(CONTENT_TYPE.to_string()).unwrap(), APPLICATION_JSON.essence_str()) + .set_body_json(json!({ + "data": { + "_entities": [{ + "__typename": "User", + "c": "1", + }, { + "__typename": "User", + "c": "2", + }] + } + })), + ).mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { __typename id name username d } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": [ + { + "__typename": "User", + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "d": "1-770-736-8031 x56442" + }, + { + "__typename": "User", + "id": 2, + "name": "Ervin Howell", + "username": "Antonette", + "d": "1-770-736-8031 x56442" + } + ] + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users"), + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/2"), + Matcher::new().method("POST").path("/graphql"), + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/2"), + ], + ); +} + +/// Tests that a connector can vend an entity reference like `user: { id: userId }` +#[tokio::test] +async fn test_entity_references() { + let mock_server = MockServer::start().await; + mock_api::posts().mount(&mock_server).await; + mock_api::user_1().mount(&mock_server).await; + mock_api::user_2().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { posts { title user { name } } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "posts": [ + { + "title": "Post 
1", + "user": { + "name": "Leanne Graham" + } + }, + { + "title": "Post 2", + "user": { + "name": "Ervin Howell" + } + } + ] + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/posts"), + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/2"), + ], + ); +} + +#[tokio::test] +async fn basic_errors() { + let mock_server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/users")) + .respond_with(ResponseTemplate::new(404).set_body_json(serde_json::json!({ + "error": "not found" + }))) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/posts")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(serde_json::json!([{ "id": "1", "userId": "1" }])), + ) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/users/1")) + .respond_with(ResponseTemplate::new(400).set_body_json(serde_json::json!({"error": "bad"}))) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/users/1/nicknames")) + .respond_with(ResponseTemplate::new(400).set_body_json(serde_json::json!({"error": "bad"}))) + .mount(&mock_server) + .await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "{ users { id } posts { id user { name nickname } } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": null, + "posts": [ + { + "id": "1", + "user": { + "name": null, + "nickname": null + } + } + ] + }, + "errors": [ + { + "message": "Request failed", + "path": [ + "users" + ], + "extensions": { + "service": "connectors", + "http": { + "status": 404 + }, + "connector": { + "coordinate": "connectors:Query.users@connect[0]" + }, + "code": "CONNECTOR_FETCH" + } + }, + { + "message": "Request failed", + "path": [ + "posts", + 0, + "user" + ], + "extensions": { + "service": "connectors", + 
"http": { + "status": 400 + }, + "connector": { + "coordinate": "connectors:Query.user@connect[0]" + }, + "code": "CONNECTOR_FETCH" + } + }, + { + "message": "Request failed", + "path": [ + "posts", + 0, + "user", + "nickname" + ], + "extensions": { + "service": "connectors", + "http": { + "status": 400 + }, + "connector": { + "coordinate": "connectors:User.nickname@connect[0]" + }, + "code": "CONNECTOR_FETCH" + } + } + ] + } + "###); +} + +#[tokio::test] +async fn basic_connection_errors() { + let response = execute( + STEEL_THREAD_SCHEMA, + "http://localhost:9999", + "{ users { id } }", + Default::default(), + None, + |_| {}, + ) + .await; + + assert_eq!(response.get("data").unwrap(), &Value::Null); + assert_eq!(response.get("errors").unwrap().as_array().unwrap().len(), 1); + let err = response + .get("errors") + .unwrap() + .as_array() + .unwrap() + .first() + .unwrap(); + // Different OSes have different codes at the end of the message so we have to assert on the parts separately + let msg = err.get("message").unwrap().as_str().unwrap(); + assert!( + msg.starts_with( + "HTTP fetch failed from 'connectors.json': tcp connect error:" // *nix: Connection refused, Windows: No connection could be made + ), + "got message: {}", + msg + ); + assert_eq!(err.get("path").unwrap(), &serde_json::json!(["users"])); + assert_eq!( + err.get("extensions").unwrap(), + &serde_json::json!({ + "service": "connectors", + "connector": { + "coordinate": "connectors:Query.users@connect[0]" + }, + "code": "HTTP_CLIENT_ERROR" + }) + ); +} + +#[tokio::test] +async fn test_headers() { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + + execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id } }", + Default::default(), + Some(json!({ + "preview_connectors": { + "subgraphs": { + "connectors": { + "$config": { + "source": { + "val": "val-from-config-source" + }, + "connect": { + "val": "val-from-config-connect" + }, + } + } + } 
+ } + })), + |request| { + let headers = request.router_request.headers_mut(); + headers.insert("x-rename-source", "renamed-by-source".parse().unwrap()); + headers.insert("x-rename-connect", "renamed-by-connect".parse().unwrap()); + headers.insert("x-forward", "forwarded".parse().unwrap()); + headers.append("x-forward", "forwarded-again".parse().unwrap()); + request + .context + .insert("val", String::from("val-from-request-context")) + .unwrap(); + }, + ) + .await; + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new() + .method("GET") + .header( + HeaderName::from_str("x-forward").unwrap(), + HeaderValue::from_str("forwarded").unwrap(), + ) + .header( + HeaderName::from_str("x-forward").unwrap(), + HeaderValue::from_str("forwarded-again").unwrap(), + ) + .header( + HeaderName::from_str("x-new-name").unwrap(), + HeaderValue::from_str("renamed-by-connect").unwrap(), + ) + .header( + HeaderName::from_str("x-insert").unwrap(), + HeaderValue::from_str("inserted").unwrap(), + ) + .header( + HeaderName::from_str("x-insert-multi-value").unwrap(), + HeaderValue::from_str("first").unwrap(), + ) + .header( + HeaderName::from_str("x-insert-multi-value").unwrap(), + HeaderValue::from_str("second").unwrap(), + ) + .header( + HeaderName::from_str("x-config-variable-source").unwrap(), + HeaderValue::from_str("before val-from-config-source after").unwrap(), + ) + .header( + HeaderName::from_str("x-config-variable-connect").unwrap(), + HeaderValue::from_str("before val-from-config-connect after").unwrap(), + ) + .header( + HeaderName::from_str("x-context-value-source").unwrap(), + HeaderValue::from_str("before val-from-request-context after").unwrap(), + ) + .header( + HeaderName::from_str("x-context-value-connect").unwrap(), + HeaderValue::from_str("before val-from-request-context after").unwrap(), + ) + .path("/users")], + ); +} + +#[tokio::test] +async fn test_args_and_this_in_header() { + let mock_server = MockServer::start().await; 
+ mock_api::user_2().mount(&mock_server).await; + mock_api::user_2_nicknames().mount(&mock_server).await; + + execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { user(id: 2){ id nickname } }", + Default::default(), + None, + |_| {}, + ) + .await; + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new() + .method("GET") + .header( + HeaderName::from_str("x-from-args").unwrap(), + HeaderValue::from_str("before 2 after").unwrap(), + ) + .path("/users/2"), + Matcher::new() + .method("GET") + .header( + HeaderName::from_str("x-from-this").unwrap(), + HeaderValue::from_str("before 2 after").unwrap(), + ) + .path("/users/2/nicknames"), + ], + ); +} + +mock! { + Subscriber {} + impl tracing_core::Subscriber for Subscriber { + fn enabled<'a>(&self, metadata: &Metadata<'a>) -> bool; + fn new_span<'a>(&self, span: &Attributes<'a>) -> Id; + fn record<'a>(&self, span: &Id, values: &Record<'a>); + fn record_follows_from(&self, span: &Id, follows: &Id); + fn event_enabled<'a>(&self, event: &Event<'a>) -> bool; + fn event<'a>(&self, event: &Event<'a>); + fn enter(&self, span: &Id); + fn exit(&self, span: &Id); + } +} + +#[tokio::test] +async fn test_tracing_connect_span() { + let mut mock_subscriber = MockSubscriber::new(); + mock_subscriber.expect_event_enabled().returning(|_| false); + mock_subscriber.expect_record().returning(|_, _| {}); + mock_subscriber + .expect_enabled() + .returning(|metadata| metadata.name() == CONNECT_SPAN_NAME); + mock_subscriber.expect_new_span().returning(|attributes| { + if attributes.metadata().name() == CONNECT_SPAN_NAME { + assert!(attributes.fields().field("apollo.connector.type").is_some()); + assert!(attributes + .fields() + .field("apollo.connector.detail") + .is_some()); + assert!(attributes + .fields() + .field("apollo.connector.field.name") + .is_some()); + assert!(attributes + .fields() + .field("apollo.connector.selection") + .is_some()); + assert!(attributes + .fields() + 
.field("apollo.connector.source.name") + .is_some()); + assert!(attributes + .fields() + .field("apollo.connector.source.detail") + .is_some()); + assert!(attributes.fields().field(OTEL_STATUS_CODE).is_some()); + Id::from_u64(1) + } else { + panic!("unexpected span: {}", attributes.metadata().name()); + } + }); + mock_subscriber + .expect_enter() + .with(eq(Id::from_u64(1))) + .returning(|_| {}); + mock_subscriber + .expect_exit() + .with(eq(Id::from_u64(1))) + .returning(|_| {}); + let _guard = tracing::subscriber::set_default(mock_subscriber); + + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + + execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id } }", + Default::default(), + None, + |_| {}, + ) + .await; +} + +#[tokio::test] +async fn test_operation_counter() { + async { + let mock_server = MockServer::start().await; + mock_api::users().mount(&mock_server).await; + execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id name username } }", + Default::default(), + None, + |_| {}, + ) + .await; + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users"), + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/2"), + ], + ); + assert_counter!( + "apollo.router.operations.connectors", + 3, + connector.type = "http", + subgraph.name = "connectors" + ); + } + .with_metrics() + .await; +} + +#[tokio::test] +async fn test_mutation() { + let mock_server = MockServer::start().await; + mock_api::create_user().mount(&mock_server).await; + + let response = execute( + MUTATION_SCHEMA, + &mock_server.uri(), + "mutation CreateUser($name: String!) 
{ + createUser(name: $name) { + success + user { + id + name + } + } + }", + serde_json_bytes::json!({ "name": "New User" }) + .as_object() + .unwrap() + .clone(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "createUser": { + "success": true, + "user": { + "id": 3, + "name": "New User" + } + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new() + .method("POST") + .body(serde_json::json!({ "username": "New User" })) + .path("/user")], + ); +} + +#[tokio::test] +async fn test_selection_set() { + let mock_server = MockServer::start().await; + mock_api::commits().mount(&mock_server).await; + + let response = execute( + SELECTION_SCHEMA, + &mock_server.uri(), + "query Commits($owner: String!, $repo: String!, $skipInlineFragment: Boolean!, + $skipNamedFragment: Boolean!, $skipField: Boolean!) { + commits(owner: $owner, repo: $repo) { + __typename + commit { + __typename + from_path_alias: name_from_path + ...CommitDetails @skip(if: $skipNamedFragment) + } + } + } + + fragment CommitDetails on CommitDetail { + by { + __typename + user: name @skip(if: $skipField) + name + ...on CommitAuthor @skip(if: $skipInlineFragment) { + address: email + owner + } + owner_not_fragment: owner + } + }", + serde_json_bytes::json!({ + "owner": "foo", + "repo": "bar", + "skipField": false, + "skipInlineFragment": false, + "skipNamedFragment": false + }) + .as_object() + .unwrap() + .clone(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "commits": [ + { + "__typename": "Commit", + "commit": { + "__typename": "CommitDetail", + "from_path_alias": "Foo Bar", + "by": { + "__typename": "CommitAuthor", + "user": "Foo Bar", + "name": "Foo Bar", + "address": "noone@nowhere", + "owner": "foo", + "owner_not_fragment": "foo" + } + } + } + ] + } + } + "###); + + req_asserts::matches( + 
&mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/repos/foo/bar/commits")], + ); +} + +#[tokio::test] +async fn test_nullability() { + let mock_server = MockServer::start().await; + mock_api::user_1_with_pet().mount(&mock_server).await; + + let response = execute( + NULLABILITY_SCHEMA, + &mock_server.uri(), + "query { user(id: 1) { id name occupation address { zip } pet { species } } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "user": { + "id": 1, + "name": "Leanne Graham", + "occupation": null, + "address": null, + "pet": { + "species": null + } + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/users/1")], + ); +} + +#[tokio::test] +async fn test_default_argument_values() { + let mock_server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/default-args")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!("hello"))) + .mount(&mock_server) + .await; + + let response = execute( + NULLABILITY_SCHEMA, + &mock_server.uri(), + "query { defaultArgs }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "defaultArgs": "hello" + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new() + .method("POST") + .path("/default-args") + .body(serde_json::json!({ + "str": "default", + "int": 42, + "float": 1.23, + "bool": true, + "arr": ["default"], + }))], + ); +} + +#[tokio::test] +async fn test_default_argument_overrides() { + let mock_server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/default-args")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!("hello"))) + .mount(&mock_server) + .await; + + let response = execute( + NULLABILITY_SCHEMA, 
+ &mock_server.uri(), + "query { defaultArgs(str: \"hi\" int: 108 float: 9.87 bool: false arr: [\"hi again\"]) }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "defaultArgs": "hello" + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new() + .method("POST") + .path("/default-args") + .body(serde_json::json!({ + "str": "hi", + "int": 108, + "float": 9.87, + "bool": false, + "arr": ["hi again"], + }))], + ); +} + +#[tokio::test] +async fn test_form_encoding() { + let mock_server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/posts")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ "id": 1 }))) + .mount(&mock_server) + .await; + let uri = mock_server.uri(); + + let response = execute( + include_str!("../testdata/form-encoding.graphql"), + &uri, + "mutation { + post( + input: { + int: 1 + str: \"s\" + bool: true + id: \"id\" + + intArr: [1, 2] + strArr: [\"a\", \"b\"] + boolArr: [true, false] + idArr: [\"id1\", \"id2\"] + + obj: { + a: 1 + b: \"b\" + c: true + nested: { + d: 1 + e: \"e\" + f: true + } + } + objArr: [ + { + a: 1 + b: \"b\" + c: true + nested: { + d: 1 + e: \"e\" + f: true + } + }, + { + a: 2 + b: \"bb\" + c: false + nested: { + d: 1 + e: \"e\" + f: true + } + } + ] + } + ) + { id } + }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "post": { + "id": 1 + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("POST").path("/posts")], + ); + + let reqs = mock_server.received_requests().await.unwrap(); + let body = String::from_utf8_lossy(&reqs[0].body).to_string(); + assert_eq!(body, 
"int=1&str=s&bool=true&id=id&intArr%5B0%5D=1&intArr%5B1%5D=2&strArr%5B0%5D=a&strArr%5B1%5D=b&boolArr%5B0%5D=true&boolArr%5B1%5D=false&idArr%5B0%5D=id1&idArr%5B1%5D=id2&obj%5Ba%5D=1&obj%5Bb%5D=b&obj%5Bc%5D=true&obj%5Bnested%5D%5Bd%5D=1&obj%5Bnested%5D%5Be%5D=e&obj%5Bnested%5D%5Bf%5D=true&objArr%5B0%5D%5Ba%5D=1&objArr%5B0%5D%5Bb%5D=b&objArr%5B0%5D%5Bc%5D=true&objArr%5B0%5D%5Bnested%5D%5Bd%5D=1&objArr%5B0%5D%5Bnested%5D%5Be%5D=e&objArr%5B0%5D%5Bnested%5D%5Bf%5D=true&objArr%5B1%5D%5Ba%5D=2&objArr%5B1%5D%5Bb%5D=bb&objArr%5B1%5D%5Bc%5D=false&objArr%5B1%5D%5Bnested%5D%5Bd%5D=1&objArr%5B1%5D%5Bnested%5D%5Be%5D=e&objArr%5B1%5D%5Bnested%5D%5Bf%5D=true"); +} + +#[tokio::test] +async fn test_no_source() { + let mock_server = MockServer::start().await; + mock_api::user_1().mount(&mock_server).await; + let uri = mock_server.uri(); + + let response = execute( + &NO_SOURCES_SCHEMA.replace("http://localhost", &uri), + &uri, + "query { user(id: 1) { id name }}", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "user": { + "id": 1, + "name": "Leanne Graham" + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/users/1")], + ); +} + +#[tokio::test] +async fn error_not_redacted() { + let mock_server = MockServer::start().await; + mock_api::users_error().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id name username } }", + Default::default(), + Some(json!({ + "include_subgraph_errors": { + "subgraphs": { + "connectors": true + } + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": null + }, + "errors": [ + { + "message": "Request failed", + "path": [ + "users" + ], + "extensions": { + "service": "connectors", + "http": { + "status": 404 + }, + "connector": { + "coordinate": 
"connectors:Query.users@connect[0]" + }, + "code": "CONNECTOR_FETCH" + } + } + ] + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/users")], + ); +} + +#[tokio::test] +async fn error_redacted() { + let mock_server = MockServer::start().await; + mock_api::users_error().mount(&mock_server).await; + + let response = execute( + STEEL_THREAD_SCHEMA, + &mock_server.uri(), + "query { users { id name username } }", + Default::default(), + Some(json!({ + "include_subgraph_errors": { + "subgraphs": { + "connectors": false + } + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "users": null + }, + "errors": [ + { + "message": "Subgraph errors redacted", + "path": [ + "users" + ] + } + ] + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/users")], + ); +} + +#[tokio::test] +async fn test_interface_object() { + let mock_server = MockServer::start().await; + Mock::given(method("GET")) + .and(path("/itfs")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(serde_json::json!([{ "id": 1, "c": 10 }, { "id": 2, "c": 11 }])), + ) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/itfs/1")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(serde_json::json!({ "id": 1, "c": 10, "d": 20 })), + ) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/itfs/2")) + .respond_with( + ResponseTemplate::new(200) + .set_body_json(serde_json::json!({ "id": 1, "c": 11, "d": 21 })), + ) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/itfs/1/e")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!("e1"))) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/itfs/2/e")) + 
.respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!("e2"))) + .mount(&mock_server) + .await; + Mock::given(method("POST")) + .and(path("/graphql")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "data": { + "_entities": [{ + "__typename": "T1", + "a": "a" + }, { + "__typename": "T2", + "b": "b" + }] + } + }))) + .mount(&mock_server) + .await; + + let response = execute( + INTERFACE_OBJECT_SCHEMA, + &mock_server.uri(), + "query { itfs { __typename id c d e ... on T1 { a } ... on T2 { b } } }", + Default::default(), + None, + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "itfs": [ + { + "__typename": "T1", + "id": 1, + "c": 10, + "d": 20, + "e": "e1", + "a": "a" + }, + { + "__typename": "T2", + "id": 2, + "c": 11, + "d": 21, + "e": "e2", + "b": "b" + } + ] + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/itfs"), + Matcher::new().method("GET").path("/itfs/1/e"), + Matcher::new().method("GET").path("/itfs/2/e"), + Matcher::new().method("GET").path("/itfs/1"), + Matcher::new().method("GET").path("/itfs/2"), + Matcher::new() + .method("POST") + .path("/graphql") + .body(serde_json::json!({ + "query": r#"query($representations: [_Any!]!) { _entities(representations: $representations) { ..._generated_onItf3_0 } } fragment _generated_onItf3_0 on Itf { __typename ... on T1 { a } ... 
on T2 { b } }"#, + "variables": { + "representations": [ + { "__typename": "Itf", "id": 1 }, + { "__typename": "Itf", "id": 2 } + ] + } + })), + ], + ); +} + +#[tokio::test] +async fn test_sources_in_context() { + let mock_server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/coprocessor")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "control": "continue", + "version": 1, + "stage": "ExecutionRequest" + }))) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/posts")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!([ + { "userId": 1, "id": 1, "title": "title", "body": "body" }, + { "userId": 1, "id": 2, "title": "title", "body": "body" }] + ))) + .mount(&mock_server) + .await; + Mock::given(method("GET")) + .and(path("/users/1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": 1, + "name": "Leanne Graham", + "username": "Bret" + }))) + .mount(&mock_server) + .await; + let uri = mock_server.uri(); + + let _ = execute( + &QUICKSTART_SCHEMA.replace("https://jsonplaceholder.typicode.com", &mock_server.uri()), + &uri, + "query Posts { posts { id body title author { name username } } }", + Default::default(), + Some(json!({ + "preview_connectors": { + "expose_sources_in_context": true + }, + "coprocessor": { + "url": format!("{}/coprocessor", mock_server.uri()), + "execution": { + "request": { + "context": true + } + } + } + })), + |_| {}, + ) + .await; + + let requests = &mock_server.received_requests().await.unwrap(); + let coprocessor_request = requests.first().unwrap(); + let body = coprocessor_request + .body_json::() + .unwrap(); + pretty_assertions::assert_eq!( + body.get("context") + .unwrap() + .as_object() + .unwrap() + .get("entries") + .unwrap() + .as_object() + .unwrap() + .get("apollo_connectors::sources_in_query_plan") + .unwrap(), + &serde_json_bytes::json!([ + { "subgraph_name": "connectors", "source_name": 
"jsonPlaceholder" } + ]) + ); +} + +#[tokio::test] +async fn test_variables() { + let mock_server = MockServer::start().await; + Mock::given(method("POST")) + .and(path("/coprocessor")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "control": "continue", + "version": 1, + "stage": "SupergraphRequest", + "context": { + "entries": { + "value": "B" + } + } + }))) + .mount(&mock_server) + .await; + Mock::given(method("POST")) + .and(path("/f")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({}))) + .mount(&mock_server) + .await; + let uri = mock_server.uri(); + + let response = execute( + &VARIABLES_SCHEMA.replace("http://localhost:4001/", &mock_server.uri()), + &uri, + "{ f(arg: \"arg\") { arg context config sibling status extra f(arg: \"arg\") { arg context config sibling status } } }", + Default::default(), + Some(json!({ + "preview_connectors": { + "subgraphs": { + "connectors": { + "$config": { + "value": "C" + } + } + } + }, + "coprocessor": { + "url": format!("{}/coprocessor", mock_server.uri()), + "supergraph": { + "request": { + "context": true + } + } + } + })), + |_| {}, + ) + .await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "f": { + "arg": "arg", + "context": "B", + "config": "C", + "sibling": "D", + "status": 200, + "extra": { + "arg": "arg", + "context": "B", + "config": "C", + "status": 200 + }, + "f": { + "arg": "arg", + "context": "B", + "config": "C", + "sibling": "D", + "status": 200 + } + } + } + } + "###); + + req_asserts::matches( + &mock_server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("POST").path("/coprocessor"), + Matcher::new() + .method("POST") + .path("/f") + .query("arg=rg&context=B&config=C") + .header("x-source-context".into(), "B".try_into().unwrap()) + .header("x-source-config".into(), "C".try_into().unwrap()) + .header("x-connect-arg".into(), "g".try_into().unwrap()) + .header("x-connect-context".into(), 
"B".try_into().unwrap()) + .header("x-connect-config".into(), "C".try_into().unwrap()) + .body(serde_json::json!({ "arg": "arg", "context": "B", "config": "C" })) + , + Matcher::new() + .method("POST") + .path("/f") + .query("arg=g&context=B&config=C&sibling=D") + .header("x-source-context".into(), "B".try_into().unwrap()) + .header("x-source-config".into(), "C".try_into().unwrap()) + .header("x-connect-arg".into(), "a".try_into().unwrap()) + .header("x-connect-context".into(), "B".try_into().unwrap()) + .header("x-connect-config".into(), "C".try_into().unwrap()) + .header("x-connect-sibling".into(), "D".try_into().unwrap()) + .body(serde_json::json!({ "arg": "arg", "context": "B", "config": "C", "sibling": "D" })) + , + ], + ); +} + +mod quickstart_tests { + use http::Uri; + + use super::*; + use crate::test_harness::http_snapshot::SnapshotServer; + + const SNAPSHOT_DIR: &str = "./src/plugins/connectors/testdata/quickstart_api_snapshots/"; + + macro_rules! map { + ($($tt:tt)*) => { + serde_json_bytes::json!($($tt)*).as_object().unwrap().clone() + }; + } + + async fn execute( + query: &str, + variables: JsonMap, + snapshot_file_name: &str, + ) -> serde_json::Value { + let snapshot_path = [SNAPSHOT_DIR, snapshot_file_name, ".json"].concat(); + + let server = SnapshotServer::spawn( + snapshot_path, + Uri::from_str("https://jsonPlaceholder.typicode.com/").unwrap(), + true, + false, + Some(vec![CONTENT_TYPE.to_string()]), + None, + ) + .await; + + super::execute( + &QUICKSTART_SCHEMA.replace("https://jsonplaceholder.typicode.com", &server.uri()), + &server.uri(), + query, + variables, + None, + |_| {}, + ) + .await + } + #[tokio::test] + async fn query_1() { + let query = r#" + query Posts { + posts { + id + body + title + } + } + "#; + + let response = execute(query, Default::default(), "query_1").await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "posts": [ + { + "id": 1, + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita 
et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto", + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + }, + { + "id": 2, + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla", + "title": "qui est esse" + } + ] + } + } + "###); + } + + #[tokio::test] + async fn query_2() { + let query = r#" + query Post($postId: ID!) { + post(id: $postId) { + id + title + body + } + } + "#; + + let response = execute(query, map!({ "postId": "1" }), "query_2").await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "post": { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + } + "###); + } + + #[tokio::test] + async fn query_3() { + let query = r#" + query PostWithAuthor($postId: ID!) { + post(id: $postId) { + id + title + body + author { + id + name + } + } + } + "#; + + let response = execute(query, map!({ "postId": "1" }), "query_3").await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "post": { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto", + "author": { + "id": 1, + "name": "Leanne Graham" + } + } + } + } + "###); + } + + #[tokio::test] + async fn query_4() { + let query = r#" + query PostsForUser($userId: ID!) 
{ + user(id: $userId) { + id + name + posts { + id + title + author { + id + name + } + } + } + } + "#; + + let response = execute(query, map!({ "userId": "1" }), "query_4").await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "user": { + "id": 1, + "name": "Leanne Graham", + "posts": [ + { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "author": { + "id": 1, + "name": "Leanne Graham" + } + }, + { + "id": 2, + "title": "qui est esse", + "author": { + "id": 1, + "name": "Leanne Graham" + } + } + ] + } + } + } + "###); + } +} + +async fn execute( + schema: &str, + uri: &str, + query: &str, + variables: JsonMap, + config: Option, + mut request_mutator: impl FnMut(&mut Request), +) -> serde_json::Value { + let connector_uri = format!("{}/", uri); + let subgraph_uri = format!("{}/graphql", uri); + + // we cannot use Testharness because the subgraph connectors are actually extracted in YamlRouterFactory + let mut factory = YamlRouterFactory; + + let common_config = json!({ + "include_subgraph_errors": { "all": true }, + "override_subgraph_url": {"graphql": subgraph_uri}, + "preview_connectors": { + "subgraphs": { + "connectors": { + "sources": { + "json": { + "override_url": connector_uri + } + } + } + } + } + }); + let config = if let Some(mut config) = config { + config.deep_merge(common_config); + config + } else { + common_config + }; + let config: Configuration = serde_json_bytes::from_value(config).unwrap(); + + let router_creator = factory + .create( + false, + Arc::new(config.clone()), + Arc::new(crate::spec::Schema::parse(schema, &config).unwrap()), + None, + None, + ) + .await + .unwrap(); + let service = router_creator.create(); + + let mut request = supergraph::Request::fake_builder() + .query(query) + .variables(variables) + .header("x-client-header", "client-header-value") + .build() + .unwrap() + .try_into() + .unwrap(); + + request_mutator(&mut request); + + let response = 
service + .oneshot(request) + .await + .unwrap() + .next_response() + .await + .unwrap() + .unwrap(); + + serde_json::from_slice(&response).unwrap() +} diff --git a/apollo-router/src/plugins/connectors/tests/quickstart.rs b/apollo-router/src/plugins/connectors/tests/quickstart.rs new file mode 100644 index 0000000000..0c48f18f0a --- /dev/null +++ b/apollo-router/src/plugins/connectors/tests/quickstart.rs @@ -0,0 +1,277 @@ +use super::*; + +macro_rules! map { + ($($tt:tt)*) => { + serde_json_bytes::json!($($tt)*).as_object().unwrap().clone() + }; + } + +async fn execute(query: &str, variables: JsonMap) -> (serde_json::Value, MockServer) { + let mock_server = MockServer::start().await; + Mock::given(method("GET")).and(path("/posts")).respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!([ + { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + }, + { + "userId": 1, + "id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + }] + )), + ).mount(&mock_server).await; + Mock::given(method("GET")).and(path("/posts/1")).respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!( + { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + )), + ).mount(&mock_server).await; + Mock::given(method("GET")).and(path("/posts/2")).respond_with( + 
ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "userId": 1, + "id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + } + )), + ).mount(&mock_server).await; + Mock::given(method("GET")) + .and(path("/users/1")) + .respond_with(ResponseTemplate::new(200).set_body_json(serde_json::json!({ + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { + "lat": "-37.3159", + "lng": "81.1496" + } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + }))) + .mount(&mock_server) + .await; + Mock::given(method("GET")).and(path("/users/1/posts")).respond_with( + ResponseTemplate::new(200).set_body_json(serde_json::json!([ + { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + }, + { + "userId": 1, + "id": 2, + "title": "qui est esse", + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla" + }] + )), + ).mount(&mock_server).await; + + let res = super::execute( + &QUICKSTART_SCHEMA.replace("https://jsonplaceholder.typicode.com", &mock_server.uri()), + &mock_server.uri(), + query, + variables, + None, + |_| {}, + ) + .await; + + (res, mock_server) 
+} + +#[tokio::test] +async fn query_1() { + let query = r#" + query Posts { + posts { + id + body + title + } + } + "#; + + let (response, server) = execute(query, Default::default()).await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "posts": [ + { + "id": 1, + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto", + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + }, + { + "id": 2, + "body": "est rerum tempore vitae\nsequi sint nihil reprehenderit dolor beatae ea dolores neque\nfugiat blanditiis voluptate porro vel nihil molestiae ut reiciendis\nqui aperiam non debitis possimus qui neque nisi nulla", + "title": "qui est esse" + } + ] + } + } + "###); + + req_asserts::matches( + &server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/posts")], + ); +} + +#[tokio::test] +async fn query_2() { + let query = r#" + query Post($postId: ID!) { + post(id: $postId) { + id + title + body + } + } + "#; + + let (response, server) = execute(query, map!({ "postId": "1" })).await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "post": { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + } + "###); + + req_asserts::matches( + &server.received_requests().await.unwrap(), + vec![Matcher::new().method("GET").path("/posts/1")], + ); +} + +#[tokio::test] +async fn query_3() { + let query = r#" + query PostWithAuthor($postId: ID!) 
{ + post(id: $postId) { + id + title + body + author { + id + name + } + } + } + "#; + + let (response, server) = execute(query, map!({ "postId": "1" })).await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "post": { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto", + "author": { + "id": 1, + "name": "Leanne Graham" + } + } + } + } + "###); + + req_asserts::matches( + &server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/posts/1"), + Matcher::new().method("GET").path("/users/1"), + ], + ); +} + +#[tokio::test] +async fn query_4() { + let query = r#" + query PostsForUser($userId: ID!) { + user(id: $userId) { + id + name + posts { + id + title + author { + id + name + } + } + } + } + "#; + + let (response, server) = execute(query, map!({ "userId": "1" })).await; + + insta::assert_json_snapshot!(response, @r###" + { + "data": { + "user": { + "id": 1, + "name": "Leanne Graham", + "posts": [ + { + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "author": { + "id": 1, + "name": "Leanne Graham" + } + }, + { + "id": 2, + "title": "qui est esse", + "author": { + "id": 1, + "name": "Leanne Graham" + } + } + ] + } + } + } + "###); + + req_asserts::matches( + &server.received_requests().await.unwrap(), + vec![ + Matcher::new().method("GET").path("/users/1"), + Matcher::new().method("GET").path("/users/1/posts"), + Matcher::new().method("GET").path("/posts/1"), + Matcher::new().method("GET").path("/posts/2"), + Matcher::new().method("GET").path("/users/1"), + ], + ); +} diff --git a/apollo-router/src/plugins/connectors/tests/req_asserts.rs b/apollo-router/src/plugins/connectors/tests/req_asserts.rs new file mode 100644 index 0000000000..25e4455fe8 
--- /dev/null +++ b/apollo-router/src/plugins/connectors/tests/req_asserts.rs @@ -0,0 +1,134 @@ +use std::collections::HashMap; +use std::collections::HashSet; + +use itertools::Itertools; +use wiremock::http::HeaderName; +use wiremock::http::HeaderValue; +use wiremock::http::HeaderValues; + +#[derive(Clone)] +pub(crate) struct Matcher { + method: Option, + path: Option, + query: Option, + body: Option, + headers: HashMap, +} + +impl Matcher { + pub(crate) fn new() -> Self { + Self { + method: None, + path: None, + query: None, + body: None, + headers: Default::default(), + } + } + + pub(crate) fn method(mut self, method: &str) -> Self { + self.method = Some(method.to_string()); + self + } + + pub(crate) fn path(mut self, path: &str) -> Self { + self.path = Some(path.to_string()); + self + } + + pub(crate) fn query(mut self, query: &str) -> Self { + self.query = Some(query.to_string()); + self + } + + pub(crate) fn body(mut self, body: serde_json::Value) -> Self { + self.body = Some(body); + self + } + + pub(crate) fn header(mut self, name: HeaderName, value: HeaderValue) -> Self { + let values = self.headers.entry(name).or_insert(Vec::new().into()); + values.append(&mut Vec::from([value]).into()); + self + } + + fn matches(&self, request: &wiremock::Request, index: usize) { + if let Some(method) = self.method.as_ref() { + assert_eq!( + method, + &request.method.to_string(), + "[Request {}]: Expected method {}, got {}", + index, + method, + request.method + ) + } + + if let Some(path) = self.path.as_ref() { + assert_eq!( + path, + request.url.path(), + "[Request {}]: Expected path {}, got {}", + index, + path, + request.url.path() + ) + } + + if let Some(query) = self.query.as_ref() { + assert_eq!( + query, + request.url.query().unwrap_or_default(), + "[Request {}]: Expected query {}, got {}", + index, + query, + request.url.query().unwrap_or_default() + ) + } + + if let Some(body) = self.body.as_ref() { + assert_eq!( + body, + &request.body_json::().unwrap(), + 
"[Request {}]: incorrect body", + index, + ) + } + + for (name, expected) in self.headers.iter() { + match request.headers.get(name) { + Some(actual) => { + let expected: HashSet = + expected.iter().map(|v| v.as_str().to_owned()).collect(); + let actual: HashSet = + actual.iter().map(|v| v.as_str().to_owned()).collect(); + assert_eq!( + expected, + actual, + "[Request {}]: expected header {} to be [{}], was [{}]", + index, + name, + expected.iter().join(", "), + actual.iter().join(", ") + ); + } + None => { + panic!("[Request {}]: expected header {}, was missing", index, name); + } + } + } + } +} + +pub(crate) fn matches(received: &[wiremock::Request], matchers: Vec) { + assert_eq!( + received.len(), + matchers.len(), + "Expected {} requests, recorded {}", + matchers.len(), + received.len() + ); + for (i, (request, matcher)) in received.iter().zip(matchers.iter()).enumerate() { + matcher.matches(request, i); + } +} diff --git a/apollo-router/src/plugins/connectors/tracing.rs b/apollo-router/src/plugins/connectors/tracing.rs new file mode 100644 index 0000000000..34db750156 --- /dev/null +++ b/apollo-router/src/plugins/connectors/tracing.rs @@ -0,0 +1,134 @@ +use std::collections::HashMap; + +use apollo_federation::sources::connect::expand::Connectors; +use opentelemetry::KeyValue; +use opentelemetry_api::metrics::MeterProvider as _; +use opentelemetry_api::metrics::ObservableGauge; + +use crate::metrics::meter_provider; + +pub(crate) const CONNECTOR_TYPE_HTTP: &str = "http"; + +/// Create a gauge instrument for the number of connectors and their spec versions +pub(crate) fn connect_spec_version_instrument( + connectors: Option<&Connectors>, +) -> Option> { + connectors.map(|connectors| { + let spec_counts = connect_spec_counts(connectors); + meter_provider() + .meter("apollo/router") + .u64_observable_gauge("apollo.router.schema.connectors") + .with_description("Number connect directives in the supergraph") + .with_callback(move |observer| { + 
spec_counts.iter().for_each(|(spec, &count)| { + observer.observe( + count, + &[KeyValue::new("connect.spec.version", spec.clone())], + ) + }) + }) + .init() + }) +} + +/// Map from connect spec version to the number of connectors with that version +fn connect_spec_counts(connectors: &Connectors) -> HashMap { + connectors + .by_service_name + .values() + .map(|connector| connector.spec.as_str().to_string()) + .fold(HashMap::new(), |mut acc, spec| { + *acc.entry(spec).or_insert(0u64) += 1u64; + acc + }) +} + +#[cfg(test)] +mod tests { + use std::sync::Arc; + + use apollo_compiler::name; + use apollo_federation::sources::connect::expand::Connectors; + use apollo_federation::sources::connect::ConnectId; + use apollo_federation::sources::connect::ConnectSpec; + use apollo_federation::sources::connect::Connector; + use apollo_federation::sources::connect::HTTPMethod; + use apollo_federation::sources::connect::HttpJsonTransport; + use apollo_federation::sources::connect::JSONSelection; + use url::Url; + + use crate::metrics::FutureMetricsExt as _; + use crate::plugins::connectors::tracing::connect_spec_counts; + use crate::services::connector_service::ConnectorServiceFactory; + use crate::spec::Schema; + + #[test] + fn test_connect_spec_counts() { + let connector = Connector { + spec: ConnectSpec::V0_1, + id: ConnectId::new( + "subgraph_name".into(), + None, + name!(Query), + name!(users), + 0, + "label", + ), + transport: HttpJsonTransport { + source_url: Some(Url::parse("http://localhost/").unwrap()), + connect_template: "/path".parse().unwrap(), + method: HTTPMethod::Get, + headers: Default::default(), + body: Default::default(), + }, + selection: JSONSelection::parse("$.data").unwrap(), + entity_resolver: None, + config: Default::default(), + max_requests: None, + request_variables: Default::default(), + response_variables: Default::default(), + }; + + let connectors = Connectors { + by_service_name: Arc::new( + [ + ("service_name_1".into(), connector.clone()), + 
("service_name_2".into(), connector.clone()), + ("service_name_3".into(), connector), + ] + .into(), + ), + labels_by_service_name: Default::default(), + }; + + assert_eq!( + connect_spec_counts(&connectors), + [(ConnectSpec::V0_1.to_string(), 3u64)].into() + ); + } + + const STEEL_THREAD_SCHEMA: &str = include_str!("./testdata/steelthread.graphql"); + + #[tokio::test] + async fn test_connect_spec_version_instrument() { + async { + let config = Arc::default(); + let schema = Schema::parse(STEEL_THREAD_SCHEMA, &config).unwrap(); + let _factory = ConnectorServiceFactory::new( + schema.into(), + Arc::default(), + Arc::default(), + Default::default(), + Arc::default(), + ); + + assert_gauge!( + "apollo.router.schema.connectors", + 6, + connect.spec.version = "0.1" + ); + } + .with_metrics() + .await; + } +} diff --git a/apollo-router/src/plugins/coprocessor/execution.rs b/apollo-router/src/plugins/coprocessor/execution.rs index ba1b12e70d..9f6b6d0a27 100644 --- a/apollo-router/src/plugins/coprocessor/execution.rs +++ b/apollo-router/src/plugins/coprocessor/execution.rs @@ -467,7 +467,7 @@ where } // We return the deferred_response into our stream of response chunks - Ok(new_deferred_response) + Ok::<_, BoxError>(new_deferred_response) } }) .map(|res: Result| match res { @@ -511,7 +511,7 @@ mod tests { use crate::plugin::test::MockExecutionService; use crate::plugin::test::MockInternalHttpClientService; use crate::services::execution; - use crate::services::router::body::get_body_bytes; + use crate::services::router; use crate::services::router::body::RouterBody; #[allow(clippy::type_complexity)] @@ -617,7 +617,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "ExecutionRequest", @@ -713,7 +713,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { 
Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "ExecutionRequest", @@ -796,8 +796,10 @@ mod tests { mock_with_deferred_callback(move |res: http::Request| { Box::pin(async { let deserialized_response: Externalizable = - serde_json::from_slice(&get_body_bytes(res.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(res.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); assert_eq!( @@ -858,7 +860,9 @@ mod tests { "sdl": "the sdl shouldn't change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -943,8 +947,10 @@ mod tests { mock_with_deferred_callback(move |res: http::Request| { Box::pin(async { let mut deserialized_response: Externalizable = - serde_json::from_slice(&get_body_bytes(res.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(res.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); assert_eq!( PipelineStep::ExecutionResponse.to_string(), @@ -970,7 +976,7 @@ mod tests { ); Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_string(&deserialized_response).unwrap_or_default(), )) .unwrap()) diff --git a/apollo-router/src/plugins/coprocessor/mod.rs b/apollo-router/src/plugins/coprocessor/mod.rs index d2cf150edf..355b0c98d9 100644 --- a/apollo-router/src/plugins/coprocessor/mod.rs +++ b/apollo-router/src/plugins/coprocessor/mod.rs @@ -16,9 +16,11 @@ use http::header; use http::HeaderMap; use http::HeaderName; use http::HeaderValue; -use hyper::client::HttpConnector; +use http_body_util::BodyExt; use hyper_rustls::ConfigBuilderExt; use hyper_rustls::HttpsConnector; +use 
hyper_util::client::legacy::connect::HttpConnector; +use hyper_util::rt::TokioExecutor; use schemars::JsonSchema; use serde::Deserialize; use serde::Serialize; @@ -51,9 +53,7 @@ use crate::services::external::EXTERNALIZABLE_VERSION; use crate::services::hickory_dns_connector::new_async_http_connector; use crate::services::hickory_dns_connector::AsyncHyperResolver; use crate::services::router; -use crate::services::router::body::get_body_bytes; use crate::services::router::body::RouterBody; -use crate::services::router::body::RouterBodyConverter; use crate::services::subgraph; #[cfg(test)] @@ -67,10 +67,16 @@ const POOL_IDLE_TIMEOUT_DURATION: Option = Some(Duration::from_secs(5) const COPROCESSOR_ERROR_EXTENSION: &str = "ERROR"; const COPROCESSOR_DESERIALIZATION_ERROR_EXTENSION: &str = "EXTERNAL_DESERIALIZATION_ERROR"; -type HTTPClientService = RouterBodyConverter< +type MapFn = fn(http::Response) -> http::Response; + +type HTTPClientService = tower::util::MapResponse< tower::timeout::Timeout< - hyper::Client>, RouterBody>, + hyper_util::client::legacy::Client< + HttpsConnector>, + RouterBody, + >, >, + MapFn, >; #[async_trait::async_trait] @@ -85,9 +91,11 @@ impl Plugin for CoprocessorPlugin { http_connector.set_keepalive(Some(std::time::Duration::from_secs(60))); http_connector.enforce_http(false); + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + let tls_config = rustls::ClientConfig::builder() - .with_safe_defaults() - .with_native_roots() + .with_native_roots()? 
.with_no_client_auth(); let builder = hyper_rustls::HttpsConnectorBuilder::new() @@ -102,17 +110,20 @@ impl Plugin for CoprocessorPlugin { builder.wrap_connector(http_connector) }; - let http_client = RouterBodyConverter { - inner: ServiceBuilder::new() - .layer(TimeoutLayer::new(init.config.timeout)) - .service( - hyper::Client::builder() - .http2_only(experimental_http2 == Http2Config::Http2Only) - .pool_idle_timeout(POOL_IDLE_TIMEOUT_DURATION) - .build(connector), - ), - }; - + let http_client = ServiceBuilder::new() + .map_response( + |http_response: http::Response| -> http::Response { + let (parts, body) = http_response.into_parts(); + http::Response::from_parts(parts, body.map_err(axum::Error::new).boxed_unsync()) + } as MapFn, + ) + .layer(TimeoutLayer::new(init.config.timeout)) + .service( + hyper_util::client::legacy::Client::builder(TokioExecutor::new()) + .http2_only(experimental_http2 == Http2Config::Http2Only) + .pool_idle_timeout(POOL_IDLE_TIMEOUT_DURATION) + .build(connector), + ); CoprocessorPlugin::new(http_client, init.config, init.supergraph_sdl) } @@ -645,7 +656,7 @@ where // First, extract the data we need from our request and prepare our // external call. Use our configuration to figure out which data to send. let (parts, body) = request.router_request.into_parts(); - let bytes = get_body_bytes(body).await?; + let bytes = router::body::into_bytes(body).await?; let headers_to_send = request_config .headers @@ -755,11 +766,11 @@ where // are present in our co_processor_output. let new_body = match co_processor_output.body { - Some(bytes) => RouterBody::from(bytes), - None => RouterBody::from(bytes), + Some(bytes) => router::body::from_bytes(bytes), + None => router::body::from_bytes(bytes), }; - request.router_request = http::Request::from_parts(parts, new_body.into_inner()); + request.router_request = http::Request::from_parts(parts, new_body); if let Some(context) = co_processor_output.context { for (key, value) in context.try_into_iter()? 
{ @@ -804,14 +815,12 @@ where // we split the body (which is a stream) into first response + rest of responses, // for which we will implement mapping later - let (first, rest): ( - Option>, - crate::services::router::Body, - ) = body.into_future().await; + let mut stream = body.into_data_stream(); + let first = stream.next().await.transpose()?; + let rest = stream; // If first is None, or contains an error we return an error - let opt_first: Option = first.and_then(|f| f.ok()); - let bytes = match opt_first { + let bytes = match first { Some(b) => b, None => { tracing::error!( @@ -867,11 +876,11 @@ where // bits that we sent to the co_processor. let new_body = match co_processor_output.body { - Some(bytes) => RouterBody::from(bytes), - None => RouterBody::from(bytes), + Some(bytes) => router::body::from_bytes(bytes), + None => router::body::from_bytes(bytes), }; - response.response = http::Response::from_parts(parts, new_body.into_inner()); + response.response = http::Response::from_parts(parts, new_body); if let Some(control) = co_processor_output.control { *response.response.status_mut() = control.get_http_status()? @@ -961,16 +970,17 @@ where // Create our response stream which consists of the bytes from our first body chained with the // rest of the responses in our mapped stream. - let bytes = get_body_bytes(body).await.map_err(BoxError::from); - let final_stream = once(ready(bytes)).chain(mapped_stream).boxed(); + let bytes = router::body::into_bytes(body).await.map_err(BoxError::from); + let final_stream = RouterBody::new(http_body_util::StreamBody::new( + once(ready(bytes)) + .chain(mapped_stream) + .map(|b| b.map(http_body::Frame::data).map_err(axum::Error::new)), + )); // Finally, return a response which has a Body that wraps our stream of response chunks. 
Ok(router::Response { context, - response: http::Response::from_parts( - parts, - RouterBody::wrap_stream(final_stream).into_inner(), - ), + response: http::Response::from_parts(parts, final_stream), }) } // ----------------------------------------------------------------------------------------------------- diff --git a/apollo-router/src/plugins/coprocessor/supergraph.rs b/apollo-router/src/plugins/coprocessor/supergraph.rs index 63e9addb59..a34104eaa7 100644 --- a/apollo-router/src/plugins/coprocessor/supergraph.rs +++ b/apollo-router/src/plugins/coprocessor/supergraph.rs @@ -538,7 +538,7 @@ mod tests { use crate::plugin::test::MockInternalHttpClientService; use crate::plugin::test::MockSupergraphService; use crate::plugins::telemetry::config_new::conditions::SelectorOrValue; - use crate::services::router::body::get_body_bytes; + use crate::services::router; use crate::services::supergraph; #[allow(clippy::type_complexity)] @@ -644,7 +644,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SupergraphRequest", @@ -748,7 +748,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SupergraphRequest", @@ -825,7 +825,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SupergraphRequest", @@ -895,7 +895,8 @@ mod tests { mock_with_deferred_callback(move |mut res: http::Request| { Box::pin(async move { let deserialized_response: Externalizable = - serde_json::from_slice(&get_body_bytes(&mut res).await.unwrap()).unwrap(); + serde_json::from_slice(&router::body::into_bytes(&mut 
res).await.unwrap()) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); assert_eq!( @@ -956,7 +957,9 @@ mod tests { "sdl": "the sdl shouldn't change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1042,8 +1045,10 @@ mod tests { mock_with_deferred_callback(move |res: http::Request| { Box::pin(async { let mut deserialized_response: Externalizable = - serde_json::from_slice(&get_body_bytes(res.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(res.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); assert_eq!( PipelineStep::SupergraphResponse.to_string(), @@ -1069,7 +1074,7 @@ mod tests { ); Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_string(&deserialized_response).unwrap_or_default(), )) .unwrap()) @@ -1160,8 +1165,10 @@ mod tests { mock_with_deferred_callback(move |res: http::Request| { Box::pin(async { let mut deserialized_response: Externalizable = - serde_json::from_slice(&get_body_bytes(res.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(res.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_response.version); assert_eq!( PipelineStep::SupergraphResponse.to_string(), @@ -1187,7 +1194,7 @@ mod tests { ); Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_string(&deserialized_response).unwrap_or_default(), )) .unwrap()) diff --git a/apollo-router/src/plugins/coprocessor/test.rs b/apollo-router/src/plugins/coprocessor/test.rs index a2b9374fb9..25185542e5 100644 --- a/apollo-router/src/plugins/coprocessor/test.rs +++ b/apollo-router/src/plugins/coprocessor/test.rs @@ -30,7 +30,7 @@ 
mod tests { use crate::services::external::Externalizable; use crate::services::external::PipelineStep; use crate::services::external::EXTERNALIZABLE_VERSION; - use crate::services::router::body::get_body_bytes; + use crate::services::router; use crate::services::subgraph; use crate::services::supergraph; @@ -132,7 +132,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -192,7 +194,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -251,7 +255,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -292,7 +298,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphRequest", @@ -393,14 +399,16 @@ mod tests { let mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!( deserialized_request.subgraph_request_id.as_deref(), Some("5678") ); Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphRequest", @@ -508,7 +516,7 @@ mod tests { let mock_http_client = 
mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphRequest", @@ -565,7 +573,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphRequest", @@ -631,7 +639,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphRequest", @@ -705,12 +713,13 @@ mod tests { let mock_http_client = mock_with_callback(move |r: http::Request| { Box::pin(async move { let (_, body) = r.into_parts(); - let body: Value = serde_json::from_slice(&body.to_bytes().await.unwrap()).unwrap(); + let body: Value = + serde_json::from_slice(&router::body::into_bytes(body).await.unwrap()).unwrap(); let subgraph_id = body.get("subgraphRequestId").unwrap(); assert_eq!(subgraph_id.as_str(), Some("5678")); Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphResponse", @@ -831,7 +840,7 @@ mod tests { let mock_http_client = mock_with_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SubgraphResponse", @@ -944,7 +953,7 @@ mod tests { let mock_http_client = mock_with_deferred_callback(move |_: http::Request| { Box::pin(async { Ok(http::Response::builder() - .body(RouterBody::from( + .body(router::body::from_bytes( r#"{ "version": 1, "stage": "SupergraphResponse", @@ -1021,8 +1030,10 @@ mod tests { let mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - 
serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); assert_eq!( @@ -1076,7 +1087,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1133,8 +1146,10 @@ mod tests { let mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); assert_eq!( @@ -1188,7 +1203,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1257,8 +1274,10 @@ mod tests { let mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); assert_eq!( @@ -1314,7 +1333,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1354,8 +1375,10 @@ mod tests { let 
mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - serde_json::from_slice(&get_body_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); assert_eq!( @@ -1385,7 +1408,9 @@ mod tests { } ); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1409,7 +1434,9 @@ mod tests { assert_eq!("a value", value); let actual_response = serde_json::from_slice::( - &hyper::body::to_bytes(response.into_body()).await.unwrap(), + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), ) .unwrap(); @@ -1443,8 +1470,10 @@ mod tests { let mock_http_client = mock_with_callback(move |req: http::Request| { Box::pin(async { let deserialized_request: Externalizable = - serde_json::from_slice(&hyper::body::to_bytes(req.into_body()).await.unwrap()) - .unwrap(); + serde_json::from_slice( + &router::body::into_bytes(req.into_body()).await.unwrap(), + ) + .unwrap(); assert_eq!(EXTERNALIZABLE_VERSION, deserialized_request.version); assert_eq!( @@ -1464,7 +1493,9 @@ mod tests { } ); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1486,7 +1517,9 @@ mod tests { assert_eq!(response.status(), http::StatusCode::UNAUTHORIZED); let actual_response = serde_json::from_slice::( - &hyper::body::to_bytes(response.into_body()).await.unwrap(), + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), ) .unwrap(); @@ -1531,7 +1564,7 @@ mod tests { Box::pin(async { let deserialized_response: Externalizable = serde_json::from_slice( - 
&hyper::body::to_bytes(res.into_body()).await.unwrap(), + &router::body::into_bytes(res.into_body()).await.unwrap(), ) .unwrap(); @@ -1594,7 +1627,9 @@ mod tests { "sdl": "the sdl shouldnt change" }); Ok(http::Response::builder() - .body(RouterBody::from(serde_json::to_string(&input).unwrap())) + .body(router::body::from_bytes( + serde_json::to_string(&input).unwrap(), + )) .unwrap()) }) }); @@ -1629,7 +1664,9 @@ mod tests { assert_eq!( json!({ "data": { "test": 42_u32 } }), serde_json::from_slice::( - &get_body_bytes(res.response.into_body()).await.unwrap() + &router::body::into_bytes(res.response.into_body()) + .await + .unwrap() ) .unwrap() ); diff --git a/apollo-router/src/plugins/expose_query_plan.rs b/apollo-router/src/plugins/expose_query_plan.rs index 657969e27c..f83a1d933b 100644 --- a/apollo-router/src/plugins/expose_query_plan.rs +++ b/apollo-router/src/plugins/expose_query_plan.rs @@ -9,6 +9,8 @@ use serde_json_bytes::json; use tower::BoxError; use tower::ServiceExt as TowerServiceExt; +use super::connectors::query_plans::replace_connector_service_names; +use super::connectors::query_plans::replace_connector_service_names_text; use crate::layers::ServiceExt; use crate::plugin::Plugin; use crate::plugin::PluginInit; @@ -56,14 +58,17 @@ impl Plugin for ExposeQueryPlan { .flatten() .is_some() { + let plan = + replace_connector_service_names(req.query_plan.root.clone(), &req.context); + + let text = replace_connector_service_names_text( + req.query_plan.formatted_query_plan.clone(), + &req.context, + ); + + req.context.insert(QUERY_PLAN_CONTEXT_KEY, plan).unwrap(); req.context - .insert(QUERY_PLAN_CONTEXT_KEY, req.query_plan.root.clone()) - .unwrap(); - req.context - .insert( - FORMATTED_QUERY_PLAN_CONTEXT_KEY, - req.query_plan.formatted_query_plan.clone(), - ) + .insert(FORMATTED_QUERY_PLAN_CONTEXT_KEY, text) .unwrap(); } diff --git a/apollo-router/src/plugins/file_uploads/error.rs b/apollo-router/src/plugins/file_uploads/error.rs index 
e11d2f8ba6..c89cd9e44f 100644 --- a/apollo-router/src/plugins/file_uploads/error.rs +++ b/apollo-router/src/plugins/file_uploads/error.rs @@ -54,6 +54,9 @@ pub(super) enum FileUploadError { #[error("{0}")] HyperBodyErrorWrapper(#[from] hyper::Error), + + #[error("{0}")] + AxumError(#[from] axum::Error), } impl From for graphql::Error { diff --git a/apollo-router/src/plugins/file_uploads/mod.rs b/apollo-router/src/plugins/file_uploads/mod.rs index 6932495ebc..ba764d55c8 100644 --- a/apollo-router/src/plugins/file_uploads/mod.rs +++ b/apollo-router/src/plugins/file_uploads/mod.rs @@ -174,7 +174,7 @@ async fn router_layer( let (mut request_parts, request_body) = req.router_request.into_parts(); - let mut multipart = MultipartRequest::new(request_body.into(), boundary, limits); + let mut multipart = MultipartRequest::new(request_body, boundary, limits); let operations_stream = multipart.operations_field().await?; req.context @@ -191,9 +191,9 @@ async fn router_layer( request_parts.headers.insert(CONTENT_TYPE, content_type); request_parts.headers.remove(CONTENT_LENGTH); - let request_body = RouterBody::wrap_stream(operations_stream); + let request_body = router::body::from_result_stream(operations_stream); return Ok(router::Request::from(( - http::Request::from_parts(request_parts, request_body.into_inner()), + http::Request::from_parts(request_parts, request_body), req.context, ))); } @@ -361,8 +361,9 @@ pub(crate) async fn http_request_wrapper( request_parts .headers .insert(CONTENT_TYPE, form.content_type()); - let body = RouterBody::wrap_stream(form.into_stream(operations).await); - return http::Request::from_parts(request_parts, body); + let request_body = router::body::from_result_stream(form.into_stream(operations).await); + + return http::Request::from_parts(request_parts, request_body); } req } diff --git a/apollo-router/src/plugins/file_uploads/multipart_form_data.rs b/apollo-router/src/plugins/file_uploads/multipart_form_data.rs index 59b3a1d54a..aec0475b13 
100644 --- a/apollo-router/src/plugins/file_uploads/multipart_form_data.rs +++ b/apollo-router/src/plugins/file_uploads/multipart_form_data.rs @@ -7,12 +7,14 @@ use futures::stream::TryStreamExt; use futures::Stream; use http::HeaderMap; use http::HeaderValue; +use http_body_util::BodyExt; use mediatype::names::BOUNDARY; use mediatype::names::FORM_DATA; use mediatype::names::MULTIPART; use mediatype::MediaType; use rand::RngCore; +use super::error::FileUploadError; use super::map_field::MapFieldRaw; use super::MultipartRequest; use super::Result as UploadResult; @@ -57,9 +59,8 @@ impl MultipartFormData { self.boundary, name ) }; - let static_part = tokio_stream::once(Ok(Bytes::from(field_prefix("operations")))) - .chain(operations.into_inner().map_err(Into::into)) + .chain(operations.into_data_stream().map_err(FileUploadError::from)) .chain(tokio_stream::once(Ok(Bytes::from(format!( "\r\n{}{}\r\n", field_prefix("map"), diff --git a/apollo-router/src/plugins/file_uploads/multipart_request.rs b/apollo-router/src/plugins/file_uploads/multipart_request.rs index c91095138c..5142ffb5b7 100644 --- a/apollo-router/src/plugins/file_uploads/multipart_request.rs +++ b/apollo-router/src/plugins/file_uploads/multipart_request.rs @@ -8,6 +8,7 @@ use std::task::Poll; use bytes::Bytes; use futures::Stream; use http::HeaderMap; +use http_body_util::BodyExt; use itertools::Itertools; use multer::Constraints; use multer::Multipart; @@ -74,7 +75,7 @@ impl MultipartRequest { limits: MultipartRequestLimits, ) -> Self { let multer = Multipart::with_constraints( - request_body, + request_body.into_data_stream(), boundary, Constraints::new().size_limit(SizeLimit::new().for_field("map", MAP_SIZE_LIMIT)), ); diff --git a/apollo-router/src/plugins/fleet_detector.rs b/apollo-router/src/plugins/fleet_detector.rs index 97e6524865..eeb24cc348 100644 --- a/apollo-router/src/plugins/fleet_detector.rs +++ b/apollo-router/src/plugins/fleet_detector.rs @@ -7,7 +7,8 @@ use std::time::Duration; use 
std::time::Instant; use futures::StreamExt; -use http_body::Body; +use http_body::Body as _; +use http_body_util::BodyExt as _; use opentelemetry::metrics::MeterProvider; use opentelemetry_api::metrics::ObservableGauge; use opentelemetry_api::metrics::Unit; @@ -27,7 +28,6 @@ use crate::plugin::PluginPrivate; use crate::services::http::HttpRequest; use crate::services::http::HttpResponse; use crate::services::router; -use crate::services::router::body::RouterBody; const REFRESH_INTERVAL: Duration = Duration::from_secs(60); const COMPUTE_DETECTOR_THRESHOLD: u16 = 24576; @@ -253,7 +253,7 @@ impl PluginPrivate for FleetDetector { // Count the number of request bytes from clients to the router .map_request(move |req: router::Request| router::Request { router_request: req.router_request.map(move |body| { - router::Body::wrap_stream(body.inspect(|res| { + router::body::from_result_stream(body.into_data_stream().inspect(|res| { if let Ok(bytes) = res { u64_counter!( "apollo.router.operations.request_size", @@ -268,7 +268,7 @@ impl PluginPrivate for FleetDetector { // Count the number of response bytes from the router to clients .map_response(move |res: router::Response| router::Response { response: res.response.map(move |body| { - router::Body::wrap_stream(body.inspect(|res| { + router::body::from_result_stream(body.into_data_stream().inspect(|res| { if let Ok(bytes) = res { u64_counter!( "apollo.router.operations.response_size", @@ -318,17 +318,19 @@ impl PluginPrivate for FleetDetector { } // For streaming bodies, we need to wrap the stream and count bytes as we go - RouterBody::wrap_stream(body.inspect(move |res| { - if let Ok(bytes) = res { - let sn = sn.clone(); - u64_counter!( - "apollo.router.operations.fetch.request_size", - "Total number of request bytes for subgraph fetches", - bytes.len() as u64, - subgraph.name = sn.to_string() - ); - } - })) + router::body::from_result_stream(body.into_data_stream().inspect( + move |res| { + if let Ok(bytes) = res { + let sn = 
sn.clone(); + u64_counter!( + "apollo.router.operations.fetch.request_size", + "Total number of request bytes for subgraph fetches", + bytes.len() as u64, + subgraph.name = sn.to_string() + ); + } + }, + )) }), context: req.context, } @@ -350,17 +352,19 @@ impl PluginPrivate for FleetDetector { Ok(HttpResponse { http_response: res.http_response.map(move |body| { let sn = sn.clone(); - RouterBody::wrap_stream(body.inspect(move |res| { - if let Ok(bytes) = res { - let sn = sn.clone(); - u64_counter!( + router::body::from_result_stream(body.into_data_stream().inspect( + move |res| { + if let Ok(bytes) = res { + let sn = sn.clone(); + u64_counter!( "apollo.router.operations.fetch.response_size", "Total number of response bytes for subgraph fetches", bytes.len() as u64, subgraph.name = sn.to_string() ); - } - })) + } + }, + )) }), context: res.context, }) @@ -503,7 +507,7 @@ mod tests { use crate::metrics::FutureMetricsExt as _; use crate::plugin::test::MockHttpClientService; use crate::plugin::test::MockRouterService; - use crate::services::Body; + use crate::services::router::Body; #[tokio::test] async fn test_disabled_router_service() { @@ -530,7 +534,7 @@ mod tests { let mut bad_request_router_service = plugin.router_service(mock_bad_request_service.boxed()); let router_req = router::Request::fake_builder() - .body("request") + .body(router::body::from_bytes("request")) .build() .unwrap(); let _router_response = bad_request_router_service @@ -588,7 +592,7 @@ mod tests { let mut bad_request_router_service = plugin.router_service(mock_bad_request_service.boxed()); let router_req = router::Request::fake_builder() - .body(Body::wrap_stream(Body::from("request"))) + .body(router::body::from_bytes("request")) .build() .unwrap(); let _router_response = bad_request_router_service @@ -618,32 +622,32 @@ mod tests { // GIVEN an http client service request let mut mock_bad_request_service = MockHttpClientService::new(); - 
mock_bad_request_service.expect_call().times(1).returning( - |req: http::Request| { + mock_bad_request_service + .expect_call() + .times(1) + .returning(|req| { Box::pin(async { - let data = hyper::body::to_bytes(req.into_body()).await?; Ok(http::Response::builder() .status(StatusCode::BAD_REQUEST) .header("content-type", "application/json") // making sure the request body is consumed - .body(Body::from(data)) + .body(req.into_body()) .unwrap()) }) - }, - ); + }); let mut bad_request_http_client_service = plugin.http_client_service( "subgraph", mock_bad_request_service - .map_request(|req: HttpRequest| req.http_request.map(|body| body.into_inner())) - .map_response(|res: http::Response| HttpResponse { - http_response: res.map(RouterBody::from), + .map_request(|req: HttpRequest| req.http_request) + .map_response(|res| HttpResponse { + http_response: res, context: Default::default(), }) .boxed(), ); let http_client_req = HttpRequest { http_request: http::Request::builder() - .body(RouterBody::from("request")) + .body(router::body::from_bytes("request")) .unwrap(), context: Default::default(), }; @@ -655,7 +659,10 @@ mod tests { .await .unwrap(); // making sure the response body is consumed - let _data = hyper::body::to_bytes(http_client_response.http_response.into_body()) + let _data = http_client_response + .http_response + .into_body() + .collect() .await .unwrap(); @@ -691,32 +698,32 @@ mod tests { // GIVEN an http client service request with a complete body let mut mock_bad_request_service = MockHttpClientService::new(); - mock_bad_request_service.expect_call().times(1).returning( - |req: http::Request| { + mock_bad_request_service + .expect_call() + .times(1) + .returning(|req| { Box::pin(async { - let data = hyper::body::to_bytes(req.into_body()).await?; Ok(http::Response::builder() .status(StatusCode::BAD_REQUEST) .header("content-type", "application/json") // making sure the request body is consumed - .body(Body::from(data)) + .body(req.into_body()) 
.unwrap()) }) - }, - ); + }); let mut bad_request_http_client_service = plugin.http_client_service( "subgraph", mock_bad_request_service - .map_request(|req: HttpRequest| req.http_request.map(|body| body.into_inner())) - .map_response(|res: http::Response| HttpResponse { - http_response: res.map(RouterBody::from), + .map_request(|req: HttpRequest| req.http_request) + .map_response(|res| HttpResponse { + http_response: res, context: Default::default(), }) .boxed(), ); let http_client_req = HttpRequest { http_request: http::Request::builder() - .body(RouterBody::from("request")) + .body(router::body::from_bytes("request")) .unwrap(), context: Default::default(), }; @@ -729,7 +736,10 @@ mod tests { .unwrap(); // making sure the response body is consumed - let _data = hyper::body::to_bytes(http_client_response.http_response.into_body()) + let _data = http_client_response + .http_response + .into_body() + .collect() .await .unwrap(); @@ -772,12 +782,12 @@ mod tests { mock_bad_request_service.expect_call().times(1).returning( |req: http::Request| { Box::pin(async { - let data = hyper::body::to_bytes(req.into_body()).await?; + // making sure the request body is consumed + let data = router::body::into_bytes(req.into_body()).await?; Ok(http::Response::builder() .status(StatusCode::BAD_REQUEST) .header("content-type", "application/json") - // making sure the request body is consumed - .body(Body::from(data)) + .body(router::body::from_bytes(data)) .unwrap()) }) }, @@ -785,18 +795,18 @@ mod tests { let mut bad_request_http_client_service = plugin.http_client_service( "subgraph", mock_bad_request_service - .map_request(|req: HttpRequest| req.http_request.map(|body| body.into_inner())) + .map_request(|req: HttpRequest| req.http_request) .map_response(|res: http::Response| HttpResponse { - http_response: res.map(RouterBody::from), + http_response: res.map(Body::from), context: Default::default(), }) .boxed(), ); let http_client_req = HttpRequest { http_request: 
http::Request::builder() - .body(RouterBody::wrap_stream(futures::stream::once(async { - Ok::<_, Infallible>("request") - }))) + .body(router::body::from_result_stream(futures::stream::once( + async { Ok::<_, Infallible>(bytes::Bytes::from("request")) }, + ))) .unwrap(), context: Default::default(), }; @@ -809,7 +819,7 @@ mod tests { .unwrap(); // making sure the response body is consumed - let _data = hyper::body::to_bytes(http_client_response.http_response.into_body()) + let _data = router::body::into_bytes(http_client_response.http_response.into_body()) .await .unwrap(); diff --git a/apollo-router/src/plugins/include_subgraph_errors.rs b/apollo-router/src/plugins/include_subgraph_errors.rs index 2779758cbf..2b047adbbd 100644 --- a/apollo-router/src/plugins/include_subgraph_errors.rs +++ b/apollo-router/src/plugins/include_subgraph_errors.rs @@ -1,14 +1,20 @@ use std::collections::HashMap; +use std::future; +use futures::stream; +use futures::StreamExt; use schemars::JsonSchema; use serde::Deserialize; use tower::BoxError; +use tower::ServiceBuilder; use tower::ServiceExt; use crate::json_ext::Object; use crate::plugin::Plugin; use crate::plugin::PluginInit; use crate::register_plugin; +use crate::services::execution; +use crate::services::fetch::SubgraphNameExt; use crate::services::subgraph; use crate::services::SubgraphResponse; @@ -84,6 +90,34 @@ impl Plugin for IncludeSubgraphErrors { }) .boxed() } + + // TODO: promote fetch_service to a plugin hook + fn execution_service(&self, service: execution::BoxService) -> execution::BoxService { + let all = self.config.all; + let subgraphs = self.config.subgraphs.clone(); + ServiceBuilder::new() + .map_response(move |mut response: execution::Response| { + response.response = response.response.map(move |response| { + response + .flat_map(move |mut response| { + response.errors.iter_mut().for_each(|error| { + if let Some(subgraph_name) = error.subgraph_name() { + if !*subgraphs.get(&subgraph_name).unwrap_or(&all) { + 
tracing::info!("redacted subgraph({subgraph_name}) error"); + error.message = REDACTED_ERROR_MESSAGE.to_string(); + error.extensions = Object::default(); + } + } + }); + stream::once(future::ready(response)) + }) + .boxed() + }); + response + }) + .service(service) + .boxed() + } } #[cfg(test)] diff --git a/apollo-router/src/plugins/limits/layer.rs b/apollo-router/src/plugins/limits/layer.rs index 680a95c41b..e84e333c74 100644 --- a/apollo-router/src/plugins/limits/layer.rs +++ b/apollo-router/src/plugins/limits/layer.rs @@ -224,13 +224,13 @@ where mod test { use futures::stream::StreamExt; use http::StatusCode; + use http_body_util::BodyStream; use tower::BoxError; use tower::ServiceBuilder; use tower_service::Service; use crate::plugins::limits::layer::BodyLimitControl; use crate::plugins::limits::layer::RequestBodyLimitLayer; - use crate::services; #[tokio::test] async fn test_body_content_length_limit_exceeded() { @@ -238,9 +238,7 @@ mod test { let mut service = ServiceBuilder::new() .layer(RequestBodyLimitLayer::new(control.clone())) .service_fn(|r: http::Request<_>| async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + BodyStream::new(r.into_body()).collect::>().await; panic!("should have rejected request"); }); let resp: Result, BoxError> = service @@ -255,9 +253,7 @@ mod test { let mut service = ServiceBuilder::new() .layer(RequestBodyLimitLayer::new(control.clone())) .service_fn(|r: http::Request<_>| async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + BodyStream::new(r.into_body()).collect::>().await; Ok(http::Response::builder() .status(StatusCode::OK) .body("This is a test".to_string()) @@ -277,9 +273,7 @@ mod test { let mut service = ServiceBuilder::new() .layer(RequestBodyLimitLayer::new(control.clone())) .service_fn(|r: http::Request<_>| async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + 
BodyStream::new(r.into_body()).collect::>().await; panic!("should have rejected request"); }); let resp: Result, BoxError> = service @@ -299,9 +293,7 @@ mod test { let mut service = ServiceBuilder::new() .layer(RequestBodyLimitLayer::new(control.clone())) .service_fn(|r: http::Request<_>| async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + BodyStream::new(r.into_body()).collect::>().await; Ok(http::Response::builder() .status(StatusCode::OK) .body("This is a test".to_string()) @@ -329,9 +321,7 @@ mod test { .service_fn(move |r: http::Request<_>| { let control = control.clone(); async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + BodyStream::new(r.into_body()).collect::>().await; control.update_limit(100); Ok(http::Response::builder() .status(StatusCode::OK) @@ -351,9 +341,7 @@ mod test { let mut service = ServiceBuilder::new() .layer(RequestBodyLimitLayer::new(control.clone())) .service_fn(|r: http::Request<_>| async move { - services::http::body_stream::BodyStream::new(r.into_body()) - .collect::>() - .await; + BodyStream::new(r.into_body()).collect::>().await; Ok(http::Response::builder() .status(StatusCode::OK) .body("This is a test".to_string()) diff --git a/apollo-router/src/plugins/limits/limited.rs b/apollo-router/src/plugins/limits/limited.rs index 54a632d93e..7db7ad0ad3 100644 --- a/apollo-router/src/plugins/limits/limited.rs +++ b/apollo-router/src/plugins/limits/limited.rs @@ -3,12 +3,11 @@ use std::task::Context; use std::task::Poll; use bytes::Buf; -use http::HeaderMap; use http_body::SizeHint; use pin_project_lite::pin_project; use tokio::sync::OwnedSemaphorePermit; -use crate::plugins::limits::layer::BodyLimitControl; +use super::layer::BodyLimitControl; pin_project! { /// An implementation of http_body::Body that limits the number of bytes read from the inner body. 
@@ -72,15 +71,19 @@ where type Data = Body::Data; type Error = Body::Error; - fn poll_data( + fn poll_frame( self: Pin<&mut Self>, cx: &mut Context<'_>, - ) -> Poll>> { + ) -> Poll, Self::Error>>> { let mut this = self.project(); - let res = match this.inner.poll_data(cx) { + let res = match this.inner.poll_frame(cx) { Poll::Pending => return Poll::Pending, Poll::Ready(None) => None, - Poll::Ready(Some(Ok(data))) => { + Poll::Ready(Some(Ok(frame))) => { + let Some(data) = frame.data_ref() else { + return Poll::Ready(Some(Ok(frame))); + }; + if data.remaining() > this.control.remaining() { // This is the difference between http_body::Limited and our implementation. // Dropping this mutex allows the containing layer to immediately return an error response @@ -90,7 +93,7 @@ where return Poll::Pending; } else { this.control.increment(data.remaining()); - Some(Ok(data)) + Some(Ok(frame)) } } Poll::Ready(Some(Err(err))) => Some(Err(err)), @@ -99,22 +102,10 @@ where Poll::Ready(res) } - fn poll_trailers( - self: Pin<&mut Self>, - cx: &mut Context<'_>, - ) -> Poll, Self::Error>> { - let this = self.project(); - let res = match this.inner.poll_trailers(cx) { - Poll::Pending => return Poll::Pending, - Poll::Ready(Ok(data)) => Ok(data), - Poll::Ready(Err(err)) => Err(err), - }; - - Poll::Ready(res) - } fn is_end_stream(&self) -> bool { self.inner.is_end_stream() } + fn size_hint(&self) -> SizeHint { match u64::try_from(self.control.remaining()) { Ok(n) => { @@ -138,24 +129,32 @@ mod test { use std::pin::Pin; use std::sync::Arc; + use bytes::Bytes; use http_body::Body; use tower::BoxError; use crate::plugins::limits::layer::BodyLimitControl; + use crate::services::router::body; #[test] fn test_completes() { let control = BodyLimitControl::new(100); let semaphore = Arc::new(tokio::sync::Semaphore::new(1)); let lock = semaphore.clone().try_acquire_owned().unwrap(); - let mut limited = super::Limited::new("test".to_string(), control, lock); + let mut limited = 
super::Limited::new(body::from_bytes("test".to_string()), control, lock); + + match Pin::new(&mut limited).poll_frame(&mut std::task::Context::from_waker( + &futures::task::noop_waker(), + )) { + std::task::Poll::Ready(Some(Ok(data))) => { + let data = data.into_data().unwrap().to_vec(); + let content = String::from_utf8_lossy(data.as_slice()); + assert_eq!(&content, "test"); + } + std::task::Poll::Pending => panic!("it should be ready"), + _ => panic!("the data returned is incorrect"), + } - assert_eq!( - Pin::new(&mut limited).poll_data(&mut std::task::Context::from_waker( - &futures::task::noop_waker() - )), - std::task::Poll::Ready(Some(Ok("test".to_string().into_bytes().into()))) - ); assert!(semaphore.try_acquire().is_err()); // We need to assert that if the stream is dropped the semaphore isn't released. @@ -171,12 +170,12 @@ mod test { let lock = semaphore.clone().try_acquire_owned().unwrap(); let mut limited = super::Limited::new("test".to_string(), control, lock); - assert_eq!( - Pin::new(&mut limited).poll_data(&mut std::task::Context::from_waker( - &futures::task::noop_waker() - )), - std::task::Poll::Pending - ); + match Pin::new(&mut limited).poll_frame(&mut std::task::Context::from_waker( + &futures::task::noop_waker(), + )) { + std::task::Poll::Pending => {} + std::task::Poll::Ready(_) => panic!("it should be pending"), + } assert!(semaphore.try_acquire().is_ok()) } @@ -187,26 +186,28 @@ mod test { let lock = semaphore.clone().try_acquire_owned().unwrap(); let mut limited = super::Limited::new( - hyper::Body::wrap_stream(futures::stream::iter(vec![ - Ok::<&str, BoxError>("hello"), - Ok("world"), + body::from_result_stream(futures::stream::iter(vec![ + Ok::("hello".into()), + Ok("world".into()), ])), control, lock, ); - assert!(matches!( - Pin::new(&mut limited).poll_data(&mut std::task::Context::from_waker( - &futures::task::noop_waker() - )), - std::task::Poll::Ready(Some(Ok(_))) - )); + match Pin::new(&mut limited).poll_frame(&mut 
std::task::Context::from_waker( + &futures::task::noop_waker(), + )) { + std::task::Poll::Ready(Some(Ok(_))) => {} + _ => panic!("it should be ready with Some(Ok(_)"), + } assert!(semaphore.try_acquire().is_err()); - assert!(matches!( - Pin::new(&mut limited).poll_data(&mut std::task::Context::from_waker( - &futures::task::noop_waker() - )), - std::task::Poll::Pending - )); + if Pin::new(&mut limited) + .poll_frame(&mut std::task::Context::from_waker( + &futures::task::noop_waker(), + )) + .is_ready() + { + panic!("it should be pending"); + } assert!(semaphore.try_acquire().is_ok()); } } diff --git a/apollo-router/src/plugins/limits/mod.rs b/apollo-router/src/plugins/limits/mod.rs index ec041a1c02..b2cdb3b35c 100644 --- a/apollo-router/src/plugins/limits/mod.rs +++ b/apollo-router/src/plugins/limits/mod.rs @@ -194,7 +194,6 @@ impl LimitsPlugin { match resp { Ok(r) => { if r.response.status() == StatusCode::PAYLOAD_TOO_LARGE { - Self::increment_legacy_metric(); Ok(BodyLimitError::PayloadTooLarge.into_response(ctx)) } else { Ok(r) @@ -209,26 +208,11 @@ impl LimitsPlugin { match root_cause.downcast_ref::() { None => Err(e), - Some(_) => { - Self::increment_legacy_metric(); - Ok(BodyLimitError::PayloadTooLarge.into_response(ctx)) - } + Some(_) => Ok(BodyLimitError::PayloadTooLarge.into_response(ctx)), } } } } - - fn increment_legacy_metric() { - // Remove this eventually - // This is already handled by the telemetry plugin via the http.server.request metric. 
- u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - status = StatusCode::PAYLOAD_TOO_LARGE.as_u16() as i64, - error = BodyLimitError::PayloadTooLarge.to_string() - ); - } } impl BodyLimitError { @@ -261,7 +245,6 @@ mod test { use crate::plugins::limits::LimitsPlugin; use crate::plugins::test::PluginTestHarness; use crate::services::router; - use crate::services::router::body::get_body_bytes; #[tokio::test] async fn test_body_content_length_limit_exceeded() { @@ -269,12 +252,12 @@ mod test { let resp = plugin .call_router( router::Request::fake_builder() - .body("This is a test") + .body(router::body::from_bytes("This is a test")) .build() .unwrap(), |r| async { let body = r.router_request.into_body(); - let _ = get_body_bytes(body).await?; + let _ = router::body::into_bytes(body).await?; panic!("should have failed to read stream") }, ) @@ -284,7 +267,7 @@ mod test { assert_eq!(resp.response.status(), StatusCode::PAYLOAD_TOO_LARGE); assert_eq!( String::from_utf8( - get_body_bytes(resp.response.into_body()) + router::body::into_bytes(resp.response.into_body()) .await .unwrap() .to_vec() @@ -299,10 +282,13 @@ mod test { let plugin = plugin().await; let resp = plugin .call_router( - router::Request::fake_builder().body("").build().unwrap(), + router::Request::fake_builder() + .body(router::body::empty()) + .build() + .unwrap(), |r| async { let body = r.router_request.into_body(); - let body = get_body_bytes(body).await; + let body = router::body::into_bytes(body).await; assert!(body.is_ok()); Ok(router::Response::fake_builder().build().unwrap()) }, @@ -314,7 +300,7 @@ mod test { assert_eq!(resp.response.status(), StatusCode::OK); assert_eq!( String::from_utf8( - get_body_bytes(resp.response.into_body()) + router::body::into_bytes(resp.response.into_body()) .await .unwrap() .to_vec() @@ -331,7 +317,7 @@ mod test { .call_router( router::Request::fake_builder() .header("Content-Length", "100") - .body("") + 
.body(router::body::empty()) .build() .unwrap(), |_| async { panic!("should have rejected request") }, @@ -342,7 +328,7 @@ mod test { assert_eq!(resp.response.status(), StatusCode::PAYLOAD_TOO_LARGE); assert_eq!( String::from_utf8( - get_body_bytes(resp.response.into_body()) + router::body::into_bytes(resp.response.into_body()) .await .unwrap() .to_vec() @@ -359,7 +345,7 @@ mod test { .call_router( router::Request::fake_builder() .header("Content-Length", "5") - .body("") + .body(router::body::empty()) .build() .unwrap(), |_| async { Ok(router::Response::fake_builder().build().unwrap()) }, @@ -370,7 +356,7 @@ mod test { assert_eq!(resp.response.status(), StatusCode::OK); assert_eq!( String::from_utf8( - get_body_bytes(resp.response.into_body()) + router::body::into_bytes(resp.response.into_body()) .await .unwrap() .to_vec() @@ -386,7 +372,10 @@ mod test { let plugin = plugin().await; let resp = plugin .call_router( - router::Request::fake_builder().body("").build().unwrap(), + router::Request::fake_builder() + .body(router::body::empty()) + .build() + .unwrap(), |_| async { Err(BoxError::from("error")) }, ) .await; @@ -399,7 +388,7 @@ mod test { let resp = plugin .call_router( router::Request::fake_builder() - .body("This is a test") + .body(router::body::from_bytes("This is a test")) .build() .unwrap(), |r| async move { @@ -412,7 +401,7 @@ mod test { control.update_limit(100); }); let body = r.router_request.into_body(); - let _ = get_body_bytes(body).await?; + let _ = router::body::into_bytes(body).await?; // Now let's check progress r.context.extensions().with_lock(|lock| { @@ -429,7 +418,7 @@ mod test { assert_eq!(resp.response.status(), StatusCode::OK); assert_eq!( String::from_utf8( - get_body_bytes(resp.response.into_body()) + router::body::into_bytes(resp.response.into_body()) .await .unwrap() .to_vec() diff --git a/apollo-router/src/plugins/mod.rs b/apollo-router/src/plugins/mod.rs index 5684d0c3d3..b81bbe1f0d 100644 --- a/apollo-router/src/plugins/mod.rs 
+++ b/apollo-router/src/plugins/mod.rs @@ -23,6 +23,7 @@ macro_rules! schemar_fn { pub(crate) mod authentication; pub(crate) mod authorization; pub(crate) mod cache; +pub(crate) mod connectors; mod coprocessor; pub(crate) mod csrf; mod demand_control; diff --git a/apollo-router/src/plugins/record_replay/record.rs b/apollo-router/src/plugins/record_replay/record.rs index d1c0bc9829..f2d7e1ab81 100644 --- a/apollo-router/src/plugins/record_replay/record.rs +++ b/apollo-router/src/plugins/record_replay/record.rs @@ -4,6 +4,7 @@ use std::sync::Arc; use futures::stream::once; use futures::StreamExt; +use http_body_util::BodyExt; use tokio::fs; use tower::BoxError; use tower::ServiceBuilder; @@ -19,7 +20,6 @@ use crate::plugin::PluginInit; use crate::services::execution; use crate::services::external::externalize_header_map; use crate::services::router; -use crate::services::router::body::RouterBody; use crate::services::subgraph; use crate::services::supergraph; @@ -125,13 +125,13 @@ impl Plugin for Record { }) .filter_map(|a| async move { a.unwrap() }); - let stream = stream.chain(after_complete); + let stream = stream.into_data_stream().chain(after_complete); Ok(router::Response { context: res.context, response: http::Response::from_parts( parts, - RouterBody::wrap_stream(stream).into_inner(), + router::body::from_result_stream(stream), ), }) } diff --git a/apollo-router/src/plugins/subscription.rs b/apollo-router/src/plugins/subscription.rs index 1d342f586f..6f46c0c14b 100644 --- a/apollo-router/src/plugins/subscription.rs +++ b/apollo-router/src/plugins/subscription.rs @@ -295,7 +295,7 @@ impl Plugin for Subscription { .clone() .expect("cannot run subscription in callback mode without a hmac key"); let endpoint = Endpoint::from_router_service( - format!("{path}/:callback"), + format!("{path}/{{callback}}"), CallbackService::new(self.notify.clone(), path.to_string(), callback_hmac_key) .boxed(), ); @@ -430,7 +430,7 @@ impl Service for CallbackService { match 
parts.method { Method::POST => { - let cb_body = Into::::into(body).to_bytes() + let cb_body = router::body::into_bytes(Into::::into(body)) .await .map_err(|e| format!("failed to get the request body: {e}")) .and_then(|bytes| { @@ -447,7 +447,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::BAD_REQUEST) - .body(err.into()) + .body(router::body::from_bytes(err)) .map_err(BoxError::from)?, context: req.context, }); @@ -474,7 +474,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::UNAUTHORIZED) - .body("verifier doesn't match".into()) + .body(router::body::from_bytes("verifier doesn't match")) .map_err(BoxError::from)?, context: req.context, }); @@ -495,7 +495,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body("suscription doesn't exist".into()) + .body(router::body::from_bytes("suscription doesn't exist")) .map_err(BoxError::from)?, context: req.context, }); @@ -514,7 +514,7 @@ impl Service for CallbackService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::OK) - .body("".into()) + .body(router::body::empty()) .map_err(BoxError::from)?, context: req.context, }) @@ -527,7 +527,7 @@ impl Service for CallbackService { response: http::Response::builder() .status(StatusCode::NO_CONTENT) .header(HeaderName::from_static(CALLBACK_SUBSCRIPTION_HEADER_NAME), HeaderValue::from_static(CALLBACK_SUBSCRIPTION_HEADER_VALUE)) - .body("".into()) + .body(router::body::empty()) .map_err(BoxError::from)?, context: req.context, }) @@ -536,7 +536,7 @@ impl Service for CallbackService { response: http::Response::builder() .status(StatusCode::NOT_FOUND) .header(HeaderName::from_static(CALLBACK_SUBSCRIPTION_HEADER_NAME), HeaderValue::from_static(CALLBACK_SUBSCRIPTION_HEADER_VALUE)) - .body("suscription doesn't 
exist".into()) + .body(router::body::from_bytes("suscription doesn't exist")) .map_err(BoxError::from)?, context: req.context, }) @@ -551,7 +551,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::UNAUTHORIZED) - .body("id used for the verifier is not part of ids array".into()) + .body(router::body::from_bytes("id used for the verifier is not part of ids array")) .map_err(BoxError::from)?, context: req.context, }); @@ -562,7 +562,7 @@ impl Service for CallbackService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::NO_CONTENT) - .body("".into()) + .body(router::body::empty()) .map_err(BoxError::from)?, context: req.context, }) @@ -570,7 +570,7 @@ impl Service for CallbackService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body("suscriptions don't exist".into()) + .body(router::body::from_bytes("suscriptions don't exist")) .map_err(BoxError::from)?, context: req.context, }) @@ -592,11 +592,13 @@ impl Service for CallbackService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body(serde_json::to_string_pretty(&InvalidIdsPayload{ - invalid_ids, - id, - verifier, - })?.into()) + .body(router::body::from_bytes( + serde_json::to_string_pretty(&InvalidIdsPayload{ + invalid_ids, + id, + verifier, + })?, + )) .map_err(BoxError::from)?, context: req.context, }) @@ -613,7 +615,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body("unknown topic".into()) + .body(router::body::from_bytes("unknown topic")) .map_err(BoxError::from)?, context: req.context, }); @@ -622,7 +624,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body(err.to_string().into()) + .body(router::body::from_bytes(err.to_string())) 
.map_err(BoxError::from)?, context: req.context, }); @@ -641,7 +643,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body("cannot send errors to the client".into()) + .body(router::body::from_bytes("cannot send errors to the client")) .map_err(BoxError::from)?, context: req.context, }); @@ -651,7 +653,7 @@ impl Service for CallbackService { return Ok(router::Response { response: http::Response::builder() .status(StatusCode::NOT_FOUND) - .body("cannot force delete".into()) + .body(router::body::from_bytes("cannot force delete")) .map_err(BoxError::from)?, context: req.context, }); @@ -659,7 +661,7 @@ impl Service for CallbackService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::ACCEPTED) - .body("".into()) + .body(router::body::empty()) .map_err(BoxError::from)?, context: req.context, }) @@ -669,7 +671,7 @@ impl Service for CallbackService { _ => Ok(router::Response { response: http::Response::builder() .status(StatusCode::METHOD_NOT_ALLOWED) - .body("".into()) + .body(router::body::empty()) .map_err(BoxError::from)?, context: req.context, }), @@ -702,7 +704,9 @@ fn ensure_id_consistency( Err(router::Response { response: http::Response::builder() .status(StatusCode::BAD_REQUEST) - .body("id from url path and id from body are different".into()) + .body(router::body::from_bytes( + "id from url path and id from body are different", + )) .expect("this body is valid"), context: context.clone(), }) @@ -725,6 +729,7 @@ mod tests { use crate::http_ext; use crate::plugin::test::MockSubgraphService; use crate::plugin::DynPlugin; + use crate::services::router::body; use crate::services::SubgraphRequest; use crate::services::SubgraphResponse; use crate::Notify; @@ -759,7 +764,7 @@ mod tests { .unwrap(); let http_req_prom = http::Request::get("http://localhost:4000/subscription/callback") - .body(Default::default()) + .body(body::empty()) .unwrap(); let mut 
web_endpoint = dyn_plugin .web_endpoints() @@ -772,6 +777,7 @@ mod tests { .unwrap() .into_router(); let resp = web_endpoint + .as_service() .ready() .await .unwrap() @@ -788,16 +794,13 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body( - RouterBody::from( - serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { - id: new_sub_id.clone(), - verifier: verifier.clone(), - })) - .unwrap(), - ) - .into_inner(), - ) + .body(router::body::from_bytes( + serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { + id: new_sub_id.clone(), + verifier: verifier.clone(), + })) + .unwrap(), + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::NO_CONTENT); @@ -811,7 +814,7 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Next { id: new_sub_id.clone(), payload: graphql::Response::builder() @@ -820,7 +823,7 @@ mod tests { verifier: verifier.clone(), })) .unwrap(), - ).into_inner()) + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::OK); @@ -840,7 +843,7 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Next { id: new_sub_id.clone(), payload: graphql::Response::builder() @@ -849,7 +852,7 @@ mod tests { verifier: verifier.clone(), })) .unwrap(), - ).into_inner()) + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::NOT_FOUND); @@ -858,19 +861,16 @@ 
mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body( - RouterBody::from( - serde_json::to_vec(&CallbackPayload::Subscription( - SubscriptionPayload::Heartbeat { - id: new_sub_id.clone(), - ids: vec![new_sub_id, "FAKE_SUB_ID".to_string()], - verifier: verifier.clone(), - }, - )) - .unwrap(), - ) - .into_inner(), - ) + .body(router::body::from_bytes( + serde_json::to_vec(&CallbackPayload::Subscription( + SubscriptionPayload::Heartbeat { + id: new_sub_id.clone(), + ids: vec![new_sub_id, "FAKE_SUB_ID".to_string()], + verifier: verifier.clone(), + }, + )) + .unwrap(), + )) .unwrap(); let resp = web_endpoint.oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::NOT_FOUND); @@ -906,7 +906,7 @@ mod tests { .unwrap(); let http_req_prom = http::Request::get("http://localhost:4000/subscription/callback") - .body(Default::default()) + .body(body::empty()) .unwrap(); let mut web_endpoint = dyn_plugin .web_endpoints() @@ -919,6 +919,7 @@ mod tests { .unwrap() .into_router(); let resp = web_endpoint + .as_service() .ready() .await .unwrap() @@ -935,16 +936,13 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body( - RouterBody::from( - serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { - id: new_sub_id.clone(), - verifier: verifier.clone(), - })) - .unwrap(), - ) - .into_inner(), - ) + .body(router::body::from_bytes( + serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { + id: new_sub_id.clone(), + verifier: verifier.clone(), + })) + .unwrap(), + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::UNAUTHORIZED); @@ -952,7 +950,7 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body(RouterBody::from( + 
.body(router::body::from_bytes( serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Next { id: new_sub_id.clone(), payload: graphql::Response::builder() @@ -961,7 +959,7 @@ mod tests { verifier: verifier.clone(), })) .unwrap(), - ).into_inner()) + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::UNAUTHORIZED); @@ -997,7 +995,7 @@ mod tests { .unwrap(); let http_req_prom = http::Request::get("http://localhost:4000/subscription/callback") - .body(Default::default()) + .body(body::empty()) .unwrap(); let mut web_endpoint = dyn_plugin .web_endpoints() @@ -1010,6 +1008,7 @@ mod tests { .unwrap() .into_router(); let resp = web_endpoint + .as_service() .ready() .await .unwrap() @@ -1027,16 +1026,13 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body( - RouterBody::from( - serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { - id: new_sub_id.clone(), - verifier: verifier.clone(), - })) - .unwrap(), - ) - .into_inner(), - ) + .body(router::body::from_bytes( + serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Check { + id: new_sub_id.clone(), + verifier: verifier.clone(), + })) + .unwrap(), + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::NO_CONTENT); @@ -1050,7 +1046,7 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body(crate::services::router::Body::from( + .body(router::body::from_bytes( serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Next { id: new_sub_id.clone(), payload: graphql::Response::builder() @@ -1077,22 +1073,19 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body( - RouterBody::from( - 
serde_json::to_vec(&CallbackPayload::Subscription( - SubscriptionPayload::Complete { - id: new_sub_id.clone(), - errors: Some(vec![graphql::Error::builder() - .message("cannot complete the subscription") - .extension_code("SUBSCRIPTION_ERROR") - .build()]), - verifier: verifier.clone(), - }, - )) - .unwrap(), - ) - .into_inner(), - ) + .body(router::body::from_bytes( + serde_json::to_vec(&CallbackPayload::Subscription( + SubscriptionPayload::Complete { + id: new_sub_id.clone(), + errors: Some(vec![graphql::Error::builder() + .message("cannot complete the subscription") + .extension_code("SUBSCRIPTION_ERROR") + .build()]), + verifier: verifier.clone(), + }, + )) + .unwrap(), + )) .unwrap(); let resp = web_endpoint.clone().oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::ACCEPTED); @@ -1112,7 +1105,7 @@ mod tests { let http_req = http::Request::post(format!( "http://localhost:4000/subscription/callback/{new_sub_id}" )) - .body(RouterBody::from( + .body(router::body::from_bytes( serde_json::to_vec(&CallbackPayload::Subscription(SubscriptionPayload::Next { id: new_sub_id.clone(), payload: graphql::Response::builder() @@ -1121,7 +1114,7 @@ mod tests { verifier, })) .unwrap(), - ).into_inner()) + )) .unwrap(); let resp = web_endpoint.oneshot(http_req).await.unwrap(); assert_eq!(resp.status(), http::StatusCode::NOT_FOUND); diff --git a/apollo-router/src/plugins/telemetry/apollo.rs b/apollo-router/src/plugins/telemetry/apollo.rs index 89c1ecd8e8..97ac2fbdac 100644 --- a/apollo-router/src/plugins/telemetry/apollo.rs +++ b/apollo-router/src/plugins/telemetry/apollo.rs @@ -84,7 +84,7 @@ pub(crate) struct Config { pub(crate) field_level_instrumentation_sampler: SamplerOption, /// Percentage of traces to send via the OTel protocol when sending to Apollo Studio. - pub(crate) experimental_otlp_tracing_sampler: SamplerOption, + pub(crate) otlp_tracing_sampler: SamplerOption, /// OTLP protocol used for OTel traces. 
/// Note this only applies if OTel traces are enabled and is only intended for use in tests. @@ -164,8 +164,8 @@ const fn default_field_level_instrumentation_sampler() -> SamplerOption { SamplerOption::TraceIdRatioBased(0.01) } -const fn default_experimental_otlp_tracing_sampler() -> SamplerOption { - SamplerOption::Always(Sampler::AlwaysOff) +const fn default_otlp_tracing_sampler() -> SamplerOption { + SamplerOption::Always(Sampler::AlwaysOn) } fn endpoint_default() -> Url { @@ -209,7 +209,7 @@ impl Default for Config { schema_id: "".to_string(), buffer_size: default_buffer_size(), field_level_instrumentation_sampler: default_field_level_instrumentation_sampler(), - experimental_otlp_tracing_sampler: default_experimental_otlp_tracing_sampler(), + otlp_tracing_sampler: default_otlp_tracing_sampler(), send_headers: ForwardHeaders::None, send_variable_values: ForwardValues::None, batch_processor: BatchProcessorConfig::default(), diff --git a/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs b/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs index dd745ef48d..3fef6dbbca 100644 --- a/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs +++ b/apollo-router/src/plugins/telemetry/apollo_otlp_exporter.rs @@ -23,6 +23,7 @@ use sys_info::hostname; use tonic::codec::CompressionEncoding; use tonic::metadata::MetadataMap; use tonic::metadata::MetadataValue; +use tonic_0_9 as tonic; use tower::BoxError; use url::Url; diff --git a/apollo-router/src/plugins/telemetry/config.rs b/apollo-router/src/plugins/telemetry/config.rs index 8dc84e85c0..2a94976378 100644 --- a/apollo-router/src/plugins/telemetry/config.rs +++ b/apollo-router/src/plugins/telemetry/config.rs @@ -2,7 +2,7 @@ use std::collections::BTreeMap; use std::collections::HashSet; -use axum::headers::HeaderName; +use axum_extra::headers::HeaderName; use derivative::Derivative; use num_traits::ToPrimitive; use opentelemetry::sdk::metrics::new_view; @@ -19,7 +19,6 @@ use schemars::JsonSchema; use 
serde::Deserialize; use serde::Serialize; -use super::metrics::MetricsAttributesConf; use super::*; use crate::plugin::serde::deserialize_option_header_name; use crate::plugins::telemetry::metrics; @@ -118,8 +117,6 @@ pub(crate) struct Metrics { #[derive(Clone, Debug, Deserialize, JsonSchema)] #[serde(deny_unknown_fields, default)] pub(crate) struct MetricsCommon { - /// Configuration to add custom labels/attributes to metrics - pub(crate) attributes: MetricsAttributesConf, /// Set a service.name resource in your metrics pub(crate) service_name: Option, /// Set a service.namespace attribute in your metrics @@ -135,7 +132,6 @@ pub(crate) struct MetricsCommon { impl Default for MetricsCommon { fn default() -> Self { Self { - attributes: Default::default(), service_name: None, service_namespace: None, resource: BTreeMap::new(), @@ -286,10 +282,10 @@ impl TraceIdFormat { #[serde(deny_unknown_fields, rename_all = "lowercase")] pub(crate) enum ApolloSignatureNormalizationAlgorithm { /// Use the algorithm that matches the JavaScript-based implementation. - #[default] Legacy, /// Use a new algorithm that includes input object forms, normalized aliases and variable names, and removes some /// edge cases from the JS implementation that affected normalization. + #[default] Enhanced, } @@ -298,9 +294,9 @@ pub(crate) enum ApolloSignatureNormalizationAlgorithm { #[serde(deny_unknown_fields, rename_all = "lowercase")] pub(crate) enum ApolloMetricsReferenceMode { /// Use the extended mode to report input object fields and enum value references as well as object fields. + #[default] Extended, /// Use the standard mode that only reports referenced object fields. 
- #[default] Standard, } diff --git a/apollo-router/src/plugins/telemetry/config_new/attributes.rs b/apollo-router/src/plugins/telemetry/config_new/attributes.rs index 60179c5c20..6081d12e4f 100644 --- a/apollo-router/src/plugins/telemetry/config_new/attributes.rs +++ b/apollo-router/src/plugins/telemetry/config_new/attributes.rs @@ -67,6 +67,16 @@ const NETWORK_LOCAL_PORT: Key = Key::from_static_str("network.local.port"); const NETWORK_PEER_ADDRESS: Key = Key::from_static_str("network.peer.address"); const NETWORK_PEER_PORT: Key = Key::from_static_str("network.peer.port"); +pub(crate) const HTTP_REQUEST_HEADERS: Key = Key::from_static_str("http.request.headers"); +pub(crate) const HTTP_REQUEST_URI: Key = Key::from_static_str("http.request.uri"); +pub(crate) const HTTP_REQUEST_VERSION: Key = Key::from_static_str("http.request.version"); +pub(crate) const HTTP_REQUEST_BODY: Key = Key::from_static_str("http.request.body"); + +pub(super) const HTTP_RESPONSE_HEADERS: Key = Key::from_static_str("http.response.headers"); +pub(super) const HTTP_RESPONSE_STATUS: Key = Key::from_static_str("http.response.status"); +pub(super) const HTTP_RESPONSE_VERSION: Key = Key::from_static_str("http.response.version"); +pub(super) const HTTP_RESPONSE_BODY: Key = Key::from_static_str("http.response.body"); + #[derive(Deserialize, JsonSchema, Clone, Debug, Default, Copy)] #[serde(deny_unknown_fields, rename_all = "snake_case")] pub(crate) enum DefaultAttributeRequirementLevel { diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/attributes.rs b/apollo-router/src/plugins/telemetry/config_new/connector/attributes.rs new file mode 100644 index 0000000000..f1178059ae --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/attributes.rs @@ -0,0 +1,144 @@ +use opentelemetry_api::Key; +use opentelemetry_api::KeyValue; +use schemars::JsonSchema; +use serde::Deserialize; +use tower::BoxError; + +use 
crate::plugins::telemetry::config_new::attributes::DefaultAttributeRequirementLevel; +use crate::plugins::telemetry::config_new::attributes::StandardAttribute; +use crate::plugins::telemetry::config_new::attributes::SUBGRAPH_NAME; +use crate::plugins::telemetry::config_new::connector::ConnectorRequest; +use crate::plugins::telemetry::config_new::connector::ConnectorResponse; +use crate::plugins::telemetry::config_new::DefaultForLevel; +use crate::plugins::telemetry::config_new::Selectors; +use crate::plugins::telemetry::otlp::TelemetryDataKind; +use crate::services::connector_service::ConnectorInfo; +use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; +use crate::Context; + +const CONNECTOR_HTTP_METHOD: Key = Key::from_static_str("connector.http.method"); +const CONNECTOR_SOURCE_NAME: Key = Key::from_static_str("connector.source.name"); +const CONNECTOR_URL_TEMPLATE: Key = Key::from_static_str("connector.url.template"); + +#[derive(Deserialize, JsonSchema, Clone, Default, Debug)] +#[serde(deny_unknown_fields, default)] +pub(crate) struct ConnectorAttributes { + /// The name of the subgraph containing the connector + /// Examples: + /// + /// * posts + /// + /// Requirement level: Required + #[serde(rename = "subgraph.name")] + subgraph_name: Option, + + /// The name of the source for this connector, if defined + /// Examples: + /// + /// * posts_api + /// + /// Requirement level: Conditionally Required: If the connector has a source defined + #[serde(rename = "connector.source.name")] + connector_source_name: Option, + + /// The HTTP method for the connector + /// Examples: + /// + /// * GET + /// * POST + /// + /// Requirement level: Required + #[serde(rename = "connector.http.method")] + connector_http_method: Option, + + /// The connector URL template, relative to the source base URL if one is defined + /// Examples: + /// + /// * /users/{$this.id!}/post + /// + /// Requirement level: Required + #[serde(rename = "connector.url.template")] + 
connector_url_template: Option, +} + +impl DefaultForLevel for ConnectorAttributes { + fn defaults_for_level( + &mut self, + requirement_level: DefaultAttributeRequirementLevel, + _kind: TelemetryDataKind, + ) { + match requirement_level { + DefaultAttributeRequirementLevel::Required => { + if self.subgraph_name.is_none() { + self.subgraph_name = Some(StandardAttribute::Bool(true)); + } + } + DefaultAttributeRequirementLevel::Recommended => { + if self.subgraph_name.is_none() { + self.subgraph_name = Some(StandardAttribute::Bool(true)); + } + if self.connector_source_name.is_none() { + self.connector_source_name = Some(StandardAttribute::Bool(true)); + } + if self.connector_http_method.is_none() { + self.connector_http_method = Some(StandardAttribute::Bool(true)); + } + if self.connector_url_template.is_none() { + self.connector_url_template = Some(StandardAttribute::Bool(true)); + } + } + DefaultAttributeRequirementLevel::None => {} + } + } +} + +impl Selectors for ConnectorAttributes { + fn on_request(&self, request: &ConnectorRequest) -> Vec { + let mut attrs = Vec::new(); + + if let Ok(Some(connector_info)) = request + .context + .get::<&str, ConnectorInfo>(CONNECTOR_INFO_CONTEXT_KEY) + { + if let Some(key) = self + .subgraph_name + .as_ref() + .and_then(|a| a.key(SUBGRAPH_NAME)) + { + attrs.push(KeyValue::new(key, connector_info.subgraph_name.to_string())); + } + if let Some(key) = self + .connector_source_name + .as_ref() + .and_then(|a| a.key(CONNECTOR_SOURCE_NAME)) + { + if let Some(source_name) = connector_info.source_name { + attrs.push(KeyValue::new(key, source_name.to_string())); + } + } + if let Some(key) = self + .connector_http_method + .as_ref() + .and_then(|a| a.key(CONNECTOR_HTTP_METHOD)) + { + attrs.push(KeyValue::new(key, connector_info.http_method)); + } + if let Some(key) = self + .connector_url_template + .as_ref() + .and_then(|a| a.key(CONNECTOR_URL_TEMPLATE)) + { + attrs.push(KeyValue::new(key, connector_info.url_template.to_string())); + } 
+ } + attrs + } + + fn on_response(&self, _response: &ConnectorResponse) -> Vec { + Vec::default() + } + + fn on_error(&self, _error: &BoxError, _ctx: &Context) -> Vec { + Vec::default() + } +} diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/events.rs b/apollo-router/src/plugins/telemetry/config_new/connector/events.rs new file mode 100644 index 0000000000..588f2cd5b5 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/events.rs @@ -0,0 +1,213 @@ +use opentelemetry_api::Key; +use opentelemetry_api::KeyValue; +use opentelemetry_semantic_conventions::trace::HTTP_REQUEST_METHOD; +use parking_lot::Mutex; +use schemars::JsonSchema; +use serde::Deserialize; +use tower::BoxError; + +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_HEADERS; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_URI; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_VERSION; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_HEADERS; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_STATUS; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_VERSION; +use crate::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorSelector; +use crate::plugins::telemetry::config_new::connector::ConnectorRequest; +use crate::plugins::telemetry::config_new::connector::ConnectorResponse; +use crate::plugins::telemetry::config_new::events::log_event; +use crate::plugins::telemetry::config_new::events::CustomEvent; +use crate::plugins::telemetry::config_new::events::CustomEventInner; +use crate::plugins::telemetry::config_new::events::CustomEvents; +use crate::plugins::telemetry::config_new::events::Event; +use crate::plugins::telemetry::config_new::events::EventLevel; +use crate::plugins::telemetry::config_new::events::StandardEventConfig; +use 
crate::plugins::telemetry::config_new::extendable::Extendable; +use crate::plugins::telemetry::config_new::instruments::Instrumented; +use crate::Context; + +#[derive(Clone, Deserialize, JsonSchema, Debug, Default)] +#[serde(deny_unknown_fields, default)] +pub(crate) struct ConnectorEventsConfig { + /// Log the connector HTTP request + pub(crate) request: StandardEventConfig, + /// Log the connector HTTP response + pub(crate) response: StandardEventConfig, + /// Log the connector HTTP error + pub(crate) error: StandardEventConfig, +} + +pub(crate) type ConnectorEvents = + CustomEvents; + +pub(crate) fn new_connector_events( + config: &Extendable>, +) -> ConnectorEvents { + let custom_events = config + .custom + .iter() + .filter_map(|(event_name, event_cfg)| match &event_cfg.level { + EventLevel::Off => None, + _ => Some(CustomEvent { + inner: Mutex::new(CustomEventInner { + name: event_name.clone(), + level: event_cfg.level, + event_on: event_cfg.on, + message: event_cfg.message.clone(), + selectors: event_cfg.attributes.clone().into(), + condition: event_cfg.condition.clone(), + attributes: Vec::new(), + _phantom: Default::default(), + }), + }), + }) + .collect(); + + ConnectorEvents { + request: config.attributes.request.clone().into(), + response: config.attributes.response.clone().into(), + error: config.attributes.error.clone().into(), + custom: custom_events, + } +} + +impl Instrumented + for CustomEvents< + ConnectorRequest, + ConnectorResponse, + (), + ConnectorAttributes, + ConnectorSelector, + > +{ + type Request = ConnectorRequest; + type Response = ConnectorResponse; + type EventResponse = (); + + fn on_request(&self, request: &Self::Request) { + if self.request.level() != EventLevel::Off { + if let Some(condition) = self.request.condition() { + if condition.lock().evaluate_request(request) != Some(true) { + return; + } + } + let mut attrs = Vec::with_capacity(5); + #[cfg(test)] + let headers = { + let mut headers: indexmap::IndexMap = request + 
.http_request + .headers() + .clone() + .into_iter() + .filter_map(|(name, val)| Some((name?.to_string(), val))) + .collect(); + headers.sort_keys(); + headers + }; + #[cfg(not(test))] + let headers = request.http_request.headers(); + + attrs.push(KeyValue::new( + HTTP_REQUEST_HEADERS, + opentelemetry::Value::String(format!("{:?}", headers).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_METHOD, + opentelemetry::Value::String(format!("{}", request.http_request.method()).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_URI, + opentelemetry::Value::String(format!("{}", request.http_request.uri()).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_VERSION, + opentelemetry::Value::String( + format!("{:?}", request.http_request.version()).into(), + ), + )); + // FIXME: need to re-introduce this the same way we did for router request body but we need a request id in order to + // match the request to the element in context.extensions to make sure to not mismatch the request body settings to another one + // attrs.push(KeyValue::new( + // HTTP_REQUEST_BODY, + // opentelemetry::Value::String(format!("{:?}", request.http_request.body()).into()), + // )); + log_event(self.request.level(), "connector.request", attrs, ""); + } + for custom_event in &self.custom { + custom_event.on_request(request); + } + } + + fn on_response(&self, response: &Self::Response) { + if self.response.level() != EventLevel::Off { + if let Some(condition) = self.response.condition() { + if !condition.lock().evaluate_response(response) { + return; + } + } + let mut attrs = Vec::with_capacity(4); + #[cfg(test)] + let headers = { + let mut headers: indexmap::IndexMap = response + .http_response + .headers() + .clone() + .into_iter() + .filter_map(|(name, val)| Some((name?.to_string(), val))) + .collect(); + headers.sort_keys(); + headers + }; + #[cfg(not(test))] + let headers = response.http_response.headers(); + + attrs.push(KeyValue::new( + HTTP_RESPONSE_HEADERS, + 
opentelemetry::Value::String(format!("{:?}", headers).into()), + )); + attrs.push(KeyValue::new( + HTTP_RESPONSE_STATUS, + opentelemetry::Value::String(format!("{}", response.http_response.status()).into()), + )); + attrs.push(KeyValue::new( + HTTP_RESPONSE_VERSION, + opentelemetry::Value::String( + format!("{:?}", response.http_response.version()).into(), + ), + )); + // FIXME: need to re-introduce this the same way we did for router response body but we need a request id in order to + // match the request to the element in context.extensions to make sure to not mismatch the response body settings to another one + // attrs.push(KeyValue::new( + // HTTP_RESPONSE_BODY, + // opentelemetry::Value::String(format!("{:?}", response.http_response.body()).into()), + // )); + log_event(self.response.level(), "connector.response", attrs, ""); + } + for custom_event in &self.custom { + custom_event.on_response(response); + } + } + + fn on_error(&self, error: &BoxError, ctx: &Context) { + if self.error.level() != EventLevel::Off { + if let Some(condition) = self.error.condition() { + if !condition.lock().evaluate_error(error, ctx) { + return; + } + } + log_event( + self.error.level(), + "connector.http.error", + vec![KeyValue::new( + Key::from_static_str("error"), + opentelemetry::Value::String(error.to_string().into()), + )], + "", + ); + } + for custom_event in &self.custom { + custom_event.on_error(error, ctx); + } + } +} diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/instruments.rs b/apollo-router/src/plugins/telemetry/config_new/connector/instruments.rs new file mode 100644 index 0000000000..601bb08de9 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/instruments.rs @@ -0,0 +1,349 @@ +use std::collections::HashMap; +use std::sync::Arc; + +use opentelemetry::metrics::MeterProvider; +use opentelemetry_api::metrics::Unit; +use parking_lot::Mutex; +use schemars::JsonSchema; +use serde::Deserialize; +use tokio::time::Instant; 
+use tower::BoxError; + +use crate::metrics; +use crate::plugins::telemetry::config_new::attributes::DefaultAttributeRequirementLevel; +use crate::plugins::telemetry::config_new::conditions::Condition; +use crate::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorSelector; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorValue; +use crate::plugins::telemetry::config_new::connector::ConnectorRequest; +use crate::plugins::telemetry::config_new::connector::ConnectorResponse; +use crate::plugins::telemetry::config_new::connector::HttpRequest; +use crate::plugins::telemetry::config_new::extendable::Extendable; +use crate::plugins::telemetry::config_new::instruments::CustomHistogram; +use crate::plugins::telemetry::config_new::instruments::CustomHistogramInner; +use crate::plugins::telemetry::config_new::instruments::CustomInstruments; +use crate::plugins::telemetry::config_new::instruments::DefaultedStandardInstrument; +use crate::plugins::telemetry::config_new::instruments::Increment; +use crate::plugins::telemetry::config_new::instruments::Instrument; +use crate::plugins::telemetry::config_new::instruments::Instrumented; +use crate::plugins::telemetry::config_new::instruments::StaticInstrument; +use crate::plugins::telemetry::config_new::instruments::HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC; +use crate::plugins::telemetry::config_new::instruments::HTTP_CLIENT_REQUEST_DURATION_METRIC; +use crate::plugins::telemetry::config_new::instruments::HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC; +use crate::plugins::telemetry::config_new::instruments::METER_NAME; +use crate::plugins::telemetry::config_new::DefaultForLevel; +use crate::plugins::telemetry::otlp::TelemetryDataKind; +use crate::Context; + +#[derive(Clone, Deserialize, JsonSchema, Debug, Default)] +#[serde(deny_unknown_fields, default)] +pub(crate) struct ConnectorInstrumentsConfig { + /// Histogram of 
client request duration + #[serde(rename = "http.client.request.duration")] + http_client_request_duration: + DefaultedStandardInstrument>, + + /// Histogram of client request body size + #[serde(rename = "http.client.request.body.size")] + http_client_request_body_size: + DefaultedStandardInstrument>, + + /// Histogram of client response body size + #[serde(rename = "http.client.response.body.size")] + http_client_response_body_size: + DefaultedStandardInstrument>, +} + +impl DefaultForLevel for ConnectorInstrumentsConfig { + fn defaults_for_level( + &mut self, + requirement_level: DefaultAttributeRequirementLevel, + kind: TelemetryDataKind, + ) { + self.http_client_request_duration + .defaults_for_level(requirement_level, kind); + self.http_client_request_body_size + .defaults_for_level(requirement_level, kind); + self.http_client_response_body_size + .defaults_for_level(requirement_level, kind); + } +} + +pub(crate) struct ConnectorInstruments { + http_client_request_duration: Option< + CustomHistogram< + ConnectorRequest, + ConnectorResponse, + (), + ConnectorAttributes, + ConnectorSelector, + >, + >, + http_client_request_body_size: Option< + CustomHistogram< + ConnectorRequest, + ConnectorResponse, + (), + ConnectorAttributes, + ConnectorSelector, + >, + >, + http_client_response_body_size: Option< + CustomHistogram< + ConnectorRequest, + ConnectorResponse, + (), + ConnectorAttributes, + ConnectorSelector, + >, + >, + custom: ConnectorCustomInstruments, +} + +impl ConnectorInstruments { + pub(crate) fn new( + config: &Extendable< + ConnectorInstrumentsConfig, + Instrument, + >, + static_instruments: Arc>, + ) -> Self { + let http_client_request_duration = + config + .attributes + .http_client_request_duration + .is_enabled() + .then(|| { + let mut nb_attributes = 0; + let selectors = match &config.attributes.http_client_request_duration { + DefaultedStandardInstrument::Bool(_) + | DefaultedStandardInstrument::Unset => None, + 
DefaultedStandardInstrument::Extendable { attributes } => { + nb_attributes = attributes.custom.len(); + Some(attributes.clone()) + } + }; + CustomHistogram { + inner: Mutex::new(CustomHistogramInner { + increment: Increment::Duration(Instant::now()), + condition: Condition::True, + histogram: Some(static_instruments + .get(HTTP_CLIENT_REQUEST_DURATION_METRIC) + .expect( + "cannot get static instrument for connector; this should not happen", + ) + .as_histogram() + .cloned() + .expect( + "cannot convert instrument to histogram for connector; this should not happen", + ) + ), + attributes: Vec::with_capacity(nb_attributes), + selector: None, + selectors, + updated: false, + _phantom: Default::default() + }), + } + }); + let http_client_request_body_size = + config + .attributes + .http_client_request_body_size + .is_enabled() + .then(|| { + let mut nb_attributes = 0; + let selectors = match &config.attributes.http_client_request_body_size { + DefaultedStandardInstrument::Bool(_) + | DefaultedStandardInstrument::Unset => None, + DefaultedStandardInstrument::Extendable { attributes } => { + nb_attributes = attributes.custom.len(); + Some(attributes.clone()) + } + }; + CustomHistogram { + inner: Mutex::new(CustomHistogramInner { + increment: Increment::Custom(None), + condition: Condition::True, + histogram: Some(static_instruments + .get(HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC) + .expect( + "cannot get static instrument for connector; this should not happen", + ) + .as_histogram() + .cloned() + .expect( + "cannot convert instrument to histogram for connector; this should not happen", + ) + ), + attributes: Vec::with_capacity(nb_attributes), + selector: Some(Arc::new(ConnectorSelector::HttpRequestHeader { + connector_http_request_header: "content-length".to_string(), + redact: None, + default: None, + })), + selectors, + updated: false, + _phantom: Default::default() + }), + } + }); + let http_client_response_body_size = + config + .attributes + 
.http_client_response_body_size + .is_enabled() + .then(|| { + let mut nb_attributes = 0; + let selectors = match &config.attributes.http_client_response_body_size { + DefaultedStandardInstrument::Bool(_) + | DefaultedStandardInstrument::Unset => None, + DefaultedStandardInstrument::Extendable { attributes } => { + nb_attributes = attributes.custom.len(); + Some(attributes.clone()) + } + }; + CustomHistogram { + inner: Mutex::new(CustomHistogramInner { + increment: Increment::Custom(None), + condition: Condition::True, + histogram: Some(static_instruments + .get(HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC) + .expect( + "cannot get static instrument for connector; this should not happen", + ) + .as_histogram() + .cloned() + .expect( + "cannot convert instrument to histogram for connector; this should not happen", + ) + ), + attributes: Vec::with_capacity(nb_attributes), + selector: Some(Arc::new(ConnectorSelector::ConnectorResponseHeader { + connector_http_response_header: "content-length".to_string(), + redact: None, + default: None, + })), + selectors, + updated: false, + _phantom: Default::default() + }), + } + }); + ConnectorInstruments { + http_client_request_duration, + http_client_request_body_size, + http_client_response_body_size, + custom: CustomInstruments::new(&config.custom, static_instruments), + } + } + + pub(crate) fn new_builtin( + config: &Extendable< + ConnectorInstrumentsConfig, + Instrument, + >, + ) -> HashMap { + let meter = metrics::meter_provider().meter(METER_NAME); + let mut static_instruments = HashMap::with_capacity(3); + + if config.attributes.http_client_request_duration.is_enabled() { + static_instruments.insert( + HTTP_CLIENT_REQUEST_DURATION_METRIC.to_string(), + StaticInstrument::Histogram( + meter + .f64_histogram(HTTP_CLIENT_REQUEST_DURATION_METRIC) + .with_unit(Unit::new("s")) + .with_description("Duration of HTTP client requests.") + .init(), + ), + ); + } + + if config.attributes.http_client_request_body_size.is_enabled() { + 
static_instruments.insert( + HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC.to_string(), + StaticInstrument::Histogram( + meter + .f64_histogram(HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC) + .with_unit(Unit::new("By")) + .with_description("Size of HTTP client request bodies.") + .init(), + ), + ); + } + + if config + .attributes + .http_client_response_body_size + .is_enabled() + { + static_instruments.insert( + HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC.to_string(), + StaticInstrument::Histogram( + meter + .f64_histogram(HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC) + .with_unit(Unit::new("By")) + .with_description("Size of HTTP client response bodies.") + .init(), + ), + ); + } + + static_instruments + } +} + +impl Instrumented for ConnectorInstruments { + type Request = ConnectorRequest; + type Response = ConnectorResponse; + type EventResponse = (); + + fn on_request(&self, request: &Self::Request) { + if let Some(http_client_request_duration) = &self.http_client_request_duration { + http_client_request_duration.on_request(request); + } + if let Some(http_client_request_body_size) = &self.http_client_request_body_size { + http_client_request_body_size.on_request(request); + } + if let Some(http_client_response_body_size) = &self.http_client_response_body_size { + http_client_response_body_size.on_request(request); + } + self.custom.on_request(request); + } + + fn on_response(&self, response: &Self::Response) { + if let Some(http_client_request_duration) = &self.http_client_request_duration { + http_client_request_duration.on_response(response); + } + if let Some(http_client_request_body_size) = &self.http_client_request_body_size { + http_client_request_body_size.on_response(response); + } + if let Some(http_client_response_body_size) = &self.http_client_response_body_size { + http_client_response_body_size.on_response(response); + } + self.custom.on_response(response); + } + + fn on_error(&self, error: &BoxError, ctx: &Context) { + if let Some(http_client_request_duration) = 
&self.http_client_request_duration { + http_client_request_duration.on_error(error, ctx); + } + if let Some(http_client_request_body_size) = &self.http_client_request_body_size { + http_client_request_body_size.on_error(error, ctx); + } + if let Some(http_client_response_body_size) = &self.http_client_response_body_size { + http_client_response_body_size.on_error(error, ctx); + } + self.custom.on_error(error, ctx); + } +} + +pub(crate) type ConnectorCustomInstruments = CustomInstruments< + HttpRequest, + ConnectorResponse, + (), + ConnectorAttributes, + ConnectorSelector, + ConnectorValue, +>; diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/mod.rs b/apollo-router/src/plugins/telemetry/config_new/connector/mod.rs new file mode 100644 index 0000000000..6242ccc39d --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/mod.rs @@ -0,0 +1,13 @@ +//! Connectors telemetry. + +use crate::services::http::HttpRequest; +use crate::services::http::HttpResponse; + +pub(crate) mod attributes; +pub(crate) mod events; +pub(crate) mod instruments; +pub(crate) mod selectors; +pub(crate) mod spans; + +pub(crate) type ConnectorRequest = HttpRequest; +pub(crate) type ConnectorResponse = HttpResponse; diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/selectors.rs b/apollo-router/src/plugins/telemetry/config_new/connector/selectors.rs new file mode 100644 index 0000000000..4d983916e8 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/selectors.rs @@ -0,0 +1,557 @@ +use derivative::Derivative; +use opentelemetry_api::Value; +use schemars::JsonSchema; +use serde::Deserialize; +use tower::BoxError; + +use crate::plugins::telemetry::config::AttributeValue; +use crate::plugins::telemetry::config_new::connector::ConnectorRequest; +use crate::plugins::telemetry::config_new::connector::ConnectorResponse; +use crate::plugins::telemetry::config_new::instruments::InstrumentValue; +use 
crate::plugins::telemetry::config_new::instruments::Standard; +use crate::plugins::telemetry::config_new::selectors::ErrorRepr; +use crate::plugins::telemetry::config_new::selectors::ResponseStatus; +use crate::plugins::telemetry::config_new::Selector; +use crate::plugins::telemetry::config_new::Stage; +use crate::services::connector_service::ConnectorInfo; +use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; +use crate::Context; + +#[derive(Deserialize, JsonSchema, Clone, Debug, PartialEq)] +#[serde(deny_unknown_fields, rename_all = "snake_case")] +pub(crate) enum ConnectorSource { + /// The name of the connector source. + Name, +} + +#[derive(Deserialize, JsonSchema, Clone, Debug)] +#[serde(deny_unknown_fields, rename_all = "snake_case", untagged)] +pub(crate) enum ConnectorValue { + Standard(Standard), + Custom(ConnectorSelector), +} + +impl From<&ConnectorValue> for InstrumentValue { + fn from(value: &ConnectorValue) -> Self { + match value { + ConnectorValue::Standard(s) => InstrumentValue::Standard(s.clone()), + ConnectorValue::Custom(selector) => InstrumentValue::Custom(selector.clone()), + } + } +} +#[derive(Deserialize, JsonSchema, Clone, Derivative)] +#[serde(deny_unknown_fields, rename_all = "snake_case", untagged)] +#[derivative(Debug, PartialEq)] +pub(crate) enum ConnectorSelector { + SubgraphName { + /// The subgraph name + subgraph_name: bool, + }, + ConnectorSource { + /// The connector source. + connector_source: ConnectorSource, + }, + HttpRequestHeader { + /// The name of a connector HTTP request header. + connector_http_request_header: String, + #[serde(skip)] + #[allow(dead_code)] + /// Optional redaction pattern. + redact: Option, + /// Optional default value. + default: Option, + }, + ConnectorResponseHeader { + /// The name of a connector HTTP response header. + connector_http_response_header: String, + #[serde(skip)] + #[allow(dead_code)] + /// Optional redaction pattern. + redact: Option, + /// Optional default value. 
+ default: Option, + }, + ConnectorResponseStatus { + /// The connector HTTP response status code. + connector_http_response_status: ResponseStatus, + }, + ConnectorHttpMethod { + /// The connector HTTP method. + connector_http_method: bool, + }, + ConnectorUrlTemplate { + /// The connector URL template. + connector_url_template: bool, + }, + StaticField { + /// A static value + r#static: AttributeValue, + }, + Error { + /// Critical error if it happens + error: ErrorRepr, + }, +} + +impl Selector for ConnectorSelector { + type Request = ConnectorRequest; + type Response = ConnectorResponse; + type EventResponse = (); + + fn on_request(&self, request: &Self::Request) -> Option { + let connector_info = request + .context + .get::<&str, ConnectorInfo>(CONNECTOR_INFO_CONTEXT_KEY); + match self { + ConnectorSelector::SubgraphName { subgraph_name } if *subgraph_name => connector_info + .ok() + .flatten() + .map(|info| info.subgraph_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorSource { .. } => connector_info + .ok() + .flatten() + .and_then(|info| info.source_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorHttpMethod { + connector_http_method, + } if *connector_http_method => connector_info + .ok() + .flatten() + .map(|info| info.http_method.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorUrlTemplate { + connector_url_template, + } if *connector_url_template => connector_info + .ok() + .flatten() + .map(|info| info.url_template.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::HttpRequestHeader { + connector_http_request_header: connector_request_header, + default, + .. 
+ } => request + .http_request + .headers() + .get(connector_request_header) + .and_then(|h| Some(h.to_str().ok()?.to_string())) + .or_else(|| default.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::StaticField { r#static } => Some(r#static.clone().into()), + _ => None, + } + } + + fn on_response(&self, response: &Self::Response) -> Option { + let connector_info = response + .context + .get::<&str, ConnectorInfo>(CONNECTOR_INFO_CONTEXT_KEY); + match self { + ConnectorSelector::SubgraphName { subgraph_name } if *subgraph_name => connector_info + .ok() + .flatten() + .map(|info| info.subgraph_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorSource { .. } => connector_info + .ok() + .flatten() + .and_then(|info| info.source_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorHttpMethod { + connector_http_method, + } if *connector_http_method => connector_info + .ok() + .flatten() + .map(|info| info.http_method.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorUrlTemplate { + connector_url_template, + } if *connector_url_template => connector_info + .ok() + .flatten() + .map(|info| info.url_template.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorResponseHeader { + connector_http_response_header: connector_response_header, + default, + .. 
+ } => response + .http_response + .headers() + .get(connector_response_header) + .and_then(|h| Some(h.to_str().ok()?.to_string())) + .or_else(|| default.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorResponseStatus { + connector_http_response_status: response_status, + } => match response_status { + ResponseStatus::Code => { + Some(Value::I64(response.http_response.status().as_u16() as i64)) + } + ResponseStatus::Reason => response + .http_response + .status() + .canonical_reason() + .map(|reason| reason.into()), + }, + ConnectorSelector::StaticField { r#static } => Some(r#static.clone().into()), + _ => None, + } + } + + fn on_error(&self, error: &BoxError, ctx: &Context) -> Option { + let connector_info = ctx.get::<&str, ConnectorInfo>(CONNECTOR_INFO_CONTEXT_KEY); + match self { + ConnectorSelector::SubgraphName { subgraph_name } if *subgraph_name => connector_info + .ok() + .flatten() + .map(|info| info.subgraph_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorSource { .. } => connector_info + .ok() + .flatten() + .and_then(|info| info.source_name.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorHttpMethod { + connector_http_method, + } if *connector_http_method => connector_info + .ok() + .flatten() + .map(|info| info.http_method.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::ConnectorUrlTemplate { + connector_url_template, + } if *connector_url_template => connector_info + .ok() + .flatten() + .map(|info| info.url_template.clone()) + .map(opentelemetry::Value::from), + ConnectorSelector::Error { .. 
} => Some(error.to_string().into()), + ConnectorSelector::StaticField { r#static } => Some(r#static.clone().into()), + _ => None, + } + } + + fn on_drop(&self) -> Option { + match self { + ConnectorSelector::StaticField { r#static } => Some(r#static.clone().into()), + _ => None, + } + } + + fn is_active(&self, stage: Stage) -> bool { + match stage { + Stage::Request => matches!( + self, + ConnectorSelector::HttpRequestHeader { .. } + | ConnectorSelector::SubgraphName { .. } + | ConnectorSelector::ConnectorSource { .. } + | ConnectorSelector::ConnectorHttpMethod { .. } + | ConnectorSelector::ConnectorUrlTemplate { .. } + | ConnectorSelector::StaticField { .. } + ), + Stage::Response => matches!( + self, + ConnectorSelector::ConnectorResponseHeader { .. } + | ConnectorSelector::ConnectorResponseStatus { .. } + | ConnectorSelector::SubgraphName { .. } + | ConnectorSelector::ConnectorSource { .. } + | ConnectorSelector::ConnectorHttpMethod { .. } + | ConnectorSelector::ConnectorUrlTemplate { .. } + | ConnectorSelector::StaticField { .. } + ), + Stage::ResponseEvent => false, + Stage::ResponseField => false, + Stage::Error => matches!( + self, + ConnectorSelector::Error { .. } + | ConnectorSelector::SubgraphName { .. } + | ConnectorSelector::ConnectorSource { .. } + | ConnectorSelector::ConnectorHttpMethod { .. } + | ConnectorSelector::ConnectorUrlTemplate { .. } + | ConnectorSelector::StaticField { .. } + ), + Stage::Drop => matches!(self, ConnectorSelector::StaticField { .. 
}), + } + } +} + +#[cfg(test)] +mod tests { + use apollo_federation::sources::connect::HTTPMethod; + use http::StatusCode; + + use super::ConnectorSelector; + use super::ConnectorSource; + use crate::plugins::telemetry::config_new::connector::ConnectorRequest; + use crate::plugins::telemetry::config_new::connector::ConnectorResponse; + use crate::plugins::telemetry::config_new::selectors::ResponseStatus; + use crate::plugins::telemetry::config_new::Selector; + use crate::services::connector_service::ConnectorInfo; + use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; + use crate::services::http::HttpRequest; + use crate::services::http::HttpResponse; + use crate::services::router::body; + use crate::Context; + + const TEST_SUBGRAPH_NAME: &str = "test_subgraph_name"; + const TEST_SOURCE_NAME: &str = "test_source_name"; + const TEST_URL_TEMPLATE: &str = "/test"; + const TEST_HEADER_NAME: &str = "test_header_name"; + const TEST_HEADER_VALUE: &str = "test_header_value"; + const TEST_STATIC: &str = "test_static"; + + fn connector_info() -> ConnectorInfo { + ConnectorInfo { + subgraph_name: TEST_SUBGRAPH_NAME.to_string(), + source_name: Some(TEST_SOURCE_NAME.to_string()), + http_method: HTTPMethod::Get.as_str().to_string(), + url_template: TEST_URL_TEMPLATE.to_string(), + } + } + + fn context(connector_info: ConnectorInfo) -> Context { + let context = Context::default(); + context + .insert(CONNECTOR_INFO_CONTEXT_KEY, connector_info) + .unwrap(); + context + } + + fn http_request(context: Context) -> ConnectorRequest { + HttpRequest { + http_request: http::Request::builder().body(body::empty()).unwrap(), + context, + } + } + + fn http_request_with_header(context: Context) -> ConnectorRequest { + HttpRequest { + http_request: http::Request::builder() + .header(TEST_HEADER_NAME, TEST_HEADER_VALUE) + .body(body::empty()) + .unwrap(), + context, + } + } + + fn http_response(context: Context, status_code: StatusCode) -> ConnectorResponse { + HttpResponse { + 
http_response: http::Response::builder() + .status(status_code) + .body(body::empty()) + .unwrap(), + context, + } + } + + fn http_response_with_header(context: Context, status_code: StatusCode) -> ConnectorResponse { + HttpResponse { + http_response: http::Response::builder() + .status(status_code) + .header(TEST_HEADER_NAME, TEST_HEADER_VALUE) + .body(body::empty()) + .unwrap(), + context, + } + } + + #[test] + fn connector_on_request_static_field() { + let selector = ConnectorSelector::StaticField { + r#static: TEST_STATIC.into(), + }; + assert_eq!( + Some(TEST_STATIC.into()), + selector.on_request(&http_request(context(connector_info()))) + ); + } + + #[test] + fn connector_on_request_subgraph_name() { + let selector = ConnectorSelector::SubgraphName { + subgraph_name: true, + }; + assert_eq!( + Some(TEST_SUBGRAPH_NAME.into()), + selector.on_request(&http_request(context(connector_info()))) + ); + } + + #[test] + fn connector_on_request_connector_source() { + let selector = ConnectorSelector::ConnectorSource { + connector_source: ConnectorSource::Name, + }; + assert_eq!( + Some(TEST_SOURCE_NAME.into()), + selector.on_request(&http_request(context(connector_info()))) + ); + } + + #[test] + fn connector_on_request_url_template() { + let selector = ConnectorSelector::ConnectorUrlTemplate { + connector_url_template: true, + }; + assert_eq!( + Some(TEST_URL_TEMPLATE.into()), + selector.on_request(&http_request(context(connector_info()))) + ); + } + + #[test] + fn connector_on_request_header_defaulted() { + let selector = ConnectorSelector::HttpRequestHeader { + connector_http_request_header: TEST_HEADER_NAME.to_string(), + redact: None, + default: Some("defaulted".into()), + }; + assert_eq!( + Some("defaulted".into()), + selector.on_request(&http_request(context(connector_info()))) + ); + } + + #[test] + fn connector_on_request_header_with_value() { + let selector = ConnectorSelector::HttpRequestHeader { + connector_http_request_header: TEST_HEADER_NAME.to_string(), 
+ redact: None, + default: None, + }; + assert_eq!( + Some(TEST_HEADER_VALUE.into()), + selector.on_request(&http_request_with_header(context(connector_info()))) + ); + } + + #[test] + fn connector_on_response_static_field() { + let selector = ConnectorSelector::StaticField { + r#static: TEST_STATIC.into(), + }; + assert_eq!( + Some(TEST_STATIC.into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_subgraph_name() { + let selector = ConnectorSelector::SubgraphName { + subgraph_name: true, + }; + assert_eq!( + Some(TEST_SUBGRAPH_NAME.into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_connector_source() { + let selector = ConnectorSelector::ConnectorSource { + connector_source: ConnectorSource::Name, + }; + assert_eq!( + Some(TEST_SOURCE_NAME.into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_url_template() { + let selector = ConnectorSelector::ConnectorUrlTemplate { + connector_url_template: true, + }; + assert_eq!( + Some(TEST_URL_TEMPLATE.into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_header_defaulted() { + let selector = ConnectorSelector::ConnectorResponseHeader { + connector_http_response_header: TEST_HEADER_NAME.to_string(), + redact: None, + default: Some("defaulted".into()), + }; + assert_eq!( + Some("defaulted".into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_header_with_value() { + let selector = ConnectorSelector::ConnectorResponseHeader { + connector_http_response_header: TEST_HEADER_NAME.to_string(), + redact: None, + default: None, + }; + assert_eq!( + Some(TEST_HEADER_VALUE.into()), + 
selector.on_response(&http_response_with_header( + context(connector_info()), + StatusCode::OK + )) + ); + } + + #[test] + fn connector_on_response_status_code() { + let selector = ConnectorSelector::ConnectorResponseStatus { + connector_http_response_status: ResponseStatus::Code, + }; + assert_eq!( + Some(200.into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_status_reason_ok() { + let selector = ConnectorSelector::ConnectorResponseStatus { + connector_http_response_status: ResponseStatus::Reason, + }; + assert_eq!( + Some("OK".into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_response_status_code_not_found() { + let selector = ConnectorSelector::ConnectorResponseStatus { + connector_http_response_status: ResponseStatus::Reason, + }; + assert_eq!( + Some("Not Found".into()), + selector.on_response(&http_response( + context(connector_info()), + StatusCode::NOT_FOUND + )) + ); + } + + #[test] + fn connector_on_response_http_method() { + let selector = ConnectorSelector::ConnectorHttpMethod { + connector_http_method: true, + }; + assert_eq!( + Some(HTTPMethod::Get.as_str().into()), + selector.on_response(&http_response(context(connector_info()), StatusCode::OK)) + ); + } + + #[test] + fn connector_on_drop_static_field() { + let selector = ConnectorSelector::StaticField { + r#static: TEST_STATIC.into(), + }; + assert_eq!(Some(TEST_STATIC.into()), selector.on_drop()); + } +} diff --git a/apollo-router/src/plugins/telemetry/config_new/connector/spans.rs b/apollo-router/src/plugins/telemetry/config_new/connector/spans.rs new file mode 100644 index 0000000000..db58f9f4f9 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/connector/spans.rs @@ -0,0 +1,27 @@ +use schemars::JsonSchema; +use serde::Deserialize; + +use 
crate::plugins::telemetry::config_new::attributes::DefaultAttributeRequirementLevel; +use crate::plugins::telemetry::config_new::conditional::Conditional; +use crate::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorSelector; +use crate::plugins::telemetry::config_new::extendable::Extendable; +use crate::plugins::telemetry::config_new::DefaultForLevel; +use crate::plugins::telemetry::otlp::TelemetryDataKind; + +#[derive(Deserialize, JsonSchema, Clone, Default, Debug)] +#[serde(deny_unknown_fields, default)] +pub(crate) struct ConnectorSpans { + /// Custom attributes that are attached to the connector span. + pub(crate) attributes: Extendable>, +} + +impl DefaultForLevel for ConnectorSpans { + fn defaults_for_level( + &mut self, + requirement_level: DefaultAttributeRequirementLevel, + kind: TelemetryDataKind, + ) { + self.attributes.defaults_for_level(requirement_level, kind); + } +} diff --git a/apollo-router/src/plugins/telemetry/config_new/events.rs b/apollo-router/src/plugins/telemetry/config_new/events.rs index 763686d807..f3f4ef453c 100644 --- a/apollo-router/src/plugins/telemetry/config_new/events.rs +++ b/apollo-router/src/plugins/telemetry/config_new/events.rs @@ -6,6 +6,7 @@ use std::sync::Arc; use http::HeaderValue; use opentelemetry::Key; use opentelemetry::KeyValue; +use opentelemetry_semantic_conventions::trace::HTTP_REQUEST_METHOD; use parking_lot::Mutex; use schemars::JsonSchema; use serde::Deserialize; @@ -21,7 +22,19 @@ use crate::graphql; use crate::plugins::telemetry::config_new::attributes::RouterAttributes; use crate::plugins::telemetry::config_new::attributes::SubgraphAttributes; use crate::plugins::telemetry::config_new::attributes::SupergraphAttributes; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_BODY; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_HEADERS; +use 
crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_URI; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_VERSION; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_BODY; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_HEADERS; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_STATUS; +use crate::plugins::telemetry::config_new::attributes::HTTP_RESPONSE_VERSION; use crate::plugins::telemetry::config_new::conditions::Condition; +use crate::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes; +use crate::plugins::telemetry::config_new::connector::events::ConnectorEvents; +use crate::plugins::telemetry::config_new::connector::events::ConnectorEventsConfig; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorSelector; use crate::plugins::telemetry::config_new::extendable::Extendable; use crate::plugins::telemetry::config_new::selectors::RouterSelector; use crate::plugins::telemetry::config_new::selectors::SubgraphSelector; @@ -32,6 +45,13 @@ use crate::services::subgraph; use crate::services::supergraph; use crate::Context; +#[derive(Default, Clone)] +pub(crate) struct DisplayRouterRequest(pub(crate) EventLevel); +#[derive(Default, Clone)] +pub(crate) struct DisplayRouterResponse(pub(crate) bool); +#[derive(Default, Clone)] +pub(crate) struct RouterResponseBodyExtensionType(pub(crate) String); + /// Events are #[derive(Deserialize, JsonSchema, Clone, Default, Debug)] #[serde(deny_unknown_fields, default)] @@ -42,6 +62,8 @@ pub(crate) struct Events { supergraph: Extendable>, /// Supergraph service events subgraph: Extendable>, + /// Connector events + connector: Extendable>, } impl Events { @@ -135,6 +157,10 @@ impl Events { } } + pub(crate) fn new_connector_events(&self) -> ConnectorEvents { + super::connector::events::new_connector_events(&self.connector) + } + pub(crate) fn validate(&self) -> Result<(), String> { if let 
StandardEventConfig::Conditional { condition, .. } = &self.router.attributes.request { @@ -164,6 +190,16 @@ impl Events { { condition.validate(Some(Stage::Response))?; } + if let StandardEventConfig::Conditional { condition, .. } = + &self.connector.attributes.request + { + condition.validate(Some(Stage::Request))?; + } + if let StandardEventConfig::Conditional { condition, .. } = + &self.connector.attributes.response + { + condition.validate(Some(Stage::Response))?; + } for (name, custom_event) in &self.router.custom { custom_event.validate().map_err(|err| { format!("configuration error for router custom event {name:?}: {err}") @@ -179,6 +215,11 @@ impl Events { format!("configuration error for subgraph custom event {name:?}: {err}") })?; } + for (name, custom_event) in &self.connector.custom { + custom_event.validate().map_err(|err| { + format!("configuration error for connector HTTP custom event {name:?}: {err}") + })?; + } Ok(()) } @@ -203,10 +244,10 @@ where Attributes: Selectors + Default, Sel: Selector + Debug, { - request: StandardEvent, - response: StandardEvent, - error: StandardEvent, - custom: Vec>, + pub(super) request: StandardEvent, + pub(super) response: StandardEvent, + pub(super) error: StandardEvent, + pub(super) custom: Vec>, } impl Instrumented @@ -223,43 +264,17 @@ impl Instrumented return; } } - let mut attrs = Vec::with_capacity(5); - #[cfg(test)] - let mut headers: indexmap::IndexMap = request - .router_request - .headers() - .clone() - .into_iter() - .filter_map(|(name, val)| Some((name?.to_string(), val))) - .collect(); - #[cfg(test)] - headers.sort_keys(); - #[cfg(not(test))] - let headers = request.router_request.headers(); - attrs.push(KeyValue::new( - Key::from_static_str("http.request.headers"), - opentelemetry::Value::String(format!("{:?}", headers).into()), - )); - attrs.push(KeyValue::new( - Key::from_static_str("http.request.method"), - opentelemetry::Value::String(format!("{}", request.router_request.method()).into()), - )); - 
attrs.push(KeyValue::new( - Key::from_static_str("http.request.uri"), - opentelemetry::Value::String(format!("{}", request.router_request.uri()).into()), - )); - attrs.push(KeyValue::new( - Key::from_static_str("http.request.version"), - opentelemetry::Value::String( - format!("{:?}", request.router_request.version()).into(), - ), - )); - attrs.push(KeyValue::new( - Key::from_static_str("http.request.body"), - opentelemetry::Value::String(format!("{:?}", request.router_request.body()).into()), - )); - log_event(self.request.level(), "router.request", attrs, ""); + request + .context + .extensions() + .with_lock(|mut ext| ext.insert(DisplayRouterRequest(self.request.level()))); + } + if self.response.level() != EventLevel::Off { + request + .context + .extensions() + .with_lock(|mut ext| ext.insert(DisplayRouterResponse(true))); } for custom_event in &self.custom { custom_event.on_request(request); @@ -288,21 +303,29 @@ impl Instrumented #[cfg(not(test))] let headers = response.response.headers(); attrs.push(KeyValue::new( - Key::from_static_str("http.response.headers"), + HTTP_RESPONSE_HEADERS, opentelemetry::Value::String(format!("{:?}", headers).into()), )); attrs.push(KeyValue::new( - Key::from_static_str("http.response.status"), + HTTP_RESPONSE_STATUS, opentelemetry::Value::String(format!("{}", response.response.status()).into()), )); attrs.push(KeyValue::new( - Key::from_static_str("http.response.version"), + HTTP_RESPONSE_VERSION, opentelemetry::Value::String(format!("{:?}", response.response.version()).into()), )); - attrs.push(KeyValue::new( - Key::from_static_str("http.response.body"), - opentelemetry::Value::String(format!("{:?}", response.response.body()).into()), - )); + + if let Some(body) = response + .context + .extensions() + .with_lock(|mut ext| ext.remove::()) + { + attrs.push(KeyValue::new( + HTTP_RESPONSE_BODY, + opentelemetry::Value::String(body.0.into()), + )); + } + log_event(self.response.level(), "router.response", attrs, ""); } for 
custom_event in &self.custom { @@ -367,29 +390,29 @@ impl Instrumented #[cfg(not(test))] let headers = request.supergraph_request.headers(); attrs.push(KeyValue::new( - Key::from_static_str("http.request.headers"), + HTTP_REQUEST_HEADERS, opentelemetry::Value::String(format!("{:?}", headers).into()), )); attrs.push(KeyValue::new( - Key::from_static_str("http.request.method"), + HTTP_REQUEST_METHOD, opentelemetry::Value::String( format!("{}", request.supergraph_request.method()).into(), ), )); attrs.push(KeyValue::new( - Key::from_static_str("http.request.uri"), + HTTP_REQUEST_URI, opentelemetry::Value::String( format!("{}", request.supergraph_request.uri()).into(), ), )); attrs.push(KeyValue::new( - Key::from_static_str("http.request.version"), + HTTP_REQUEST_VERSION, opentelemetry::Value::String( format!("{:?}", request.supergraph_request.version()).into(), ), )); attrs.push(KeyValue::new( - Key::from_static_str("http.request.body"), + HTTP_REQUEST_BODY, opentelemetry::Value::String( serde_json::to_string(request.supergraph_request.body()) .unwrap_or_default() @@ -622,21 +645,21 @@ where E: Debug, { /// The log level of the event. - level: EventLevel, + pub(super) level: EventLevel, /// The event message. - message: Arc, + pub(super) message: Arc, /// When to trigger the event. - on: EventOn, + pub(super) on: EventOn, /// The event attributes. #[serde(default = "Extendable::empty_arc::")] - attributes: Arc>, + pub(super) attributes: Arc>, /// The event conditions. 
#[serde(default = "Condition::empty::")] - condition: Condition, + pub(super) condition: Condition, } impl Event @@ -671,22 +694,22 @@ where A: Selectors + Default, T: Selector + Debug, { - inner: Mutex>, + pub(super) inner: Mutex>, } -struct CustomEventInner +pub(super) struct CustomEventInner where A: Selectors + Default, T: Selector + Debug, { - name: String, - level: EventLevel, - event_on: EventOn, - message: Arc, - selectors: Option>>, - condition: Condition, - attributes: Vec, - _phantom: PhantomData, + pub(super) name: String, + pub(super) level: EventLevel, + pub(super) event_on: EventOn, + pub(super) message: Arc, + pub(super) selectors: Option>>, + pub(super) condition: Condition, + pub(super) attributes: Vec, + pub(super) _phantom: PhantomData, } impl Instrumented @@ -815,8 +838,10 @@ pub(crate) fn log_event(level: EventLevel, kind: &str, attributes: Vec #[cfg(test)] mod tests { + use apollo_federation::sources::connect::HTTPMethod; use http::header::CONTENT_LENGTH; use http::HeaderValue; + use router::body; use tracing::instrument::WithSubscriber; use super::*; @@ -826,6 +851,10 @@ mod tests { use crate::graphql; use crate::plugins::telemetry::Telemetry; use crate::plugins::test::PluginTestHarness; + use crate::services::connector_service::ConnectorInfo; + use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; + use crate::services::http::HttpRequest; + use crate::services::http::HttpResponse; #[tokio::test(flavor = "multi_thread")] async fn test_router_events() { @@ -1153,4 +1182,88 @@ mod tests { .with_subscriber(assert_snapshot_subscriber!()) .await } + + #[tokio::test(flavor = "multi_thread")] + async fn test_connector_events_request() { + let test_harness: PluginTestHarness = PluginTestHarness::builder() + .config(include_str!("../testdata/custom_events.router.yaml")) + .build() + .await; + + async { + let connector_info = ConnectorInfo { + subgraph_name: "subgraph".to_string(), + source_name: Some("source".to_string()), + 
http_method: HTTPMethod::Get.as_str().to_string(), + url_template: "/test".to_string(), + }; + let context = Context::default(); + context + .insert(CONNECTOR_INFO_CONTEXT_KEY, connector_info) + .unwrap(); + let mut http_request = http::Request::builder().body(body::empty()).unwrap(); + http_request + .headers_mut() + .insert("x-log-request", HeaderValue::from_static("log")); + let http_request = HttpRequest { + http_request, + context: context.clone(), + }; + test_harness + .call_http_client("subgraph", http_request, |http_request| HttpResponse { + http_response: http::Response::builder() + .status(200) + .header("x-log-request", HeaderValue::from_static("log")) + .body(body::empty()) + .expect("expecting valid response"), + context: http_request.context.clone(), + }) + .await + .expect("expecting successful response"); + } + .with_subscriber(assert_snapshot_subscriber!()) + .await + } + + #[tokio::test(flavor = "multi_thread")] + async fn test_connector_events_response() { + let test_harness: PluginTestHarness = PluginTestHarness::builder() + .config(include_str!("../testdata/custom_events.router.yaml")) + .build() + .await; + + async { + let connector_info = ConnectorInfo { + subgraph_name: "subgraph".to_string(), + source_name: Some("source".to_string()), + http_method: HTTPMethod::Get.as_str().to_string(), + url_template: "/test".to_string(), + }; + let context = Context::default(); + context + .insert(CONNECTOR_INFO_CONTEXT_KEY, connector_info) + .unwrap(); + let mut http_request = http::Request::builder().body(body::empty()).unwrap(); + http_request + .headers_mut() + .insert("x-log-response", HeaderValue::from_static("log")); + let http_request = HttpRequest { + http_request, + context: context.clone(), + }; + test_harness + .call_http_client("subgraph", http_request, |http_request| HttpResponse { + http_response: http::Response::builder() + .status(200) + .header("x-log-response", HeaderValue::from_static("log")) + .body(body::empty()) + .expect("expecting 
valid response"), + context: http_request.context.clone(), + }) + .await + .expect("expecting successful response"); + } + .with_subscriber(assert_snapshot_subscriber!()) + .await + } } diff --git a/apollo-router/src/plugins/telemetry/config_new/experimental_when_header.rs b/apollo-router/src/plugins/telemetry/config_new/experimental_when_header.rs deleted file mode 100644 index 11bbf8dcb1..0000000000 --- a/apollo-router/src/plugins/telemetry/config_new/experimental_when_header.rs +++ /dev/null @@ -1,89 +0,0 @@ -// Note that this configuration will be removed when events are implemented. - -use regex::Regex; -use schemars::JsonSchema; -use serde::Deserialize; - -use crate::plugin::serde::deserialize_regex; -use crate::services::SupergraphRequest; - -#[derive(Clone, Debug, Deserialize, JsonSchema)] -#[serde(untagged, deny_unknown_fields, rename_all = "snake_case")] -pub(crate) enum HeaderLoggingCondition { - /// Match header value given a regex to display logs - Matching { - /// Header name - name: String, - /// Regex to match the header value - #[schemars(with = "String", rename = "match")] - #[serde(deserialize_with = "deserialize_regex", rename = "match")] - matching: Regex, - /// Display request/response headers (default: false) - #[serde(default)] - headers: bool, - /// Display request/response body (default: false) - #[serde(default)] - body: bool, - }, - /// Match header value given a value to display logs - Value { - /// Header name - name: String, - /// Header value - value: String, - /// Display request/response headers (default: false) - #[serde(default)] - headers: bool, - /// Display request/response body (default: false) - #[serde(default)] - body: bool, - }, -} - -impl HeaderLoggingCondition { - /// Returns if we should display the request/response headers and body given the `SupergraphRequest` - pub(crate) fn should_log(&self, req: &SupergraphRequest) -> (bool, bool) { - match self { - HeaderLoggingCondition::Matching { - name, - matching: matched, - 
headers, - body, - } => { - let header_match = req - .supergraph_request - .headers() - .get(name) - .and_then(|h| h.to_str().ok()) - .map(|h| matched.is_match(h)) - .unwrap_or_default(); - - if header_match { - (*headers, *body) - } else { - (false, false) - } - } - HeaderLoggingCondition::Value { - name, - value, - headers, - body, - } => { - let header_match = req - .supergraph_request - .headers() - .get(name) - .and_then(|h| h.to_str().ok()) - .map(|h| value.as_str() == h) - .unwrap_or_default(); - - if header_match { - (*headers, *body) - } else { - (false, false) - } - } - } - } -} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/metrics.snap new file mode 100644 index 0000000000..df5e1cec39 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/metrics.snap @@ -0,0 +1,35 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/instruments.rs +description: Custom counter with conditions +expression: "&metrics.all()" +info: + telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + not.found.count: + description: Count of 404 responses from the user API + type: counter + unit: count + value: unit + attributes: + url_template: + connector_url_template: true + condition: + all: + - eq: + - 404 + - connector_http_response_status: code + - eq: + - user_api + - connector_source: name +--- +- name: not.found.count + description: Count of 404 responses from the user API + unit: count + data: + datapoints: + - value: 1 + attributes: + url_template: "/user/{$this.userid}" diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/router.yaml 
b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/router.yaml new file mode 100644 index 0000000000..a62a2ddce8 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/router.yaml @@ -0,0 +1,21 @@ +telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + not.found.count: + description: "Count of 404 responses from the user API" + type: counter + unit: count + value: unit + attributes: + "url_template": + connector_url_template: true + condition: + all: + - eq: + - 404 + - connector_http_response_status: code + - eq: + - "user_api" + - connector_source: name \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/test.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/test.yaml new file mode 100644 index 0000000000..37f88836a0 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_counter_with_conditions/test.yaml @@ -0,0 +1,23 @@ +description: Custom counter with conditions +events: + - - context: + map: + "apollo_router::connector::info": + subgraph_name: users + source_name: user_api + http_method: GET + url_template: "/user/{$this.userid}" + - http_request: + uri: "/user/1" + method: GET + - http_response: + status: 200 + body: | + { "username": "foo" } + - http_request: + uri: "/user/1" + method: GET + - http_response: + status: 404 + body: | + { "error": "not found" } diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/metrics.snap new file mode 100644 index 0000000000..625db3923b --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/metrics.snap @@ -0,0 
+1,33 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/instruments.rs +description: Both subgraph and connector HTTP client duration metrics +expression: "&metrics.all()" +info: + telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + rate.limit: + value: + connector_http_response_header: x-ratelimit-remaining + unit: count + type: histogram + description: Rate limit remaining + condition: + all: + - eq: + - 200 + - connector_http_response_status: code + - eq: + - user_api + - connector_source: name +--- +- name: rate.limit + description: Rate limit remaining + unit: count + data: + datapoints: + - sum: 1499 + count: 2 + attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/router.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/router.yaml new file mode 100644 index 0000000000..c91eacbc00 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/router.yaml @@ -0,0 +1,19 @@ +telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + rate.limit: + value: + connector_http_response_header: "x-ratelimit-remaining" + unit: count + type: histogram + description: "Rate limit remaining" + condition: + all: + - eq: + - 200 + - connector_http_response_status: code + - eq: + - "user_api" + - connector_source: name diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/test.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/test.yaml new file mode 100644 index 0000000000..ec048a9937 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/custom_histogram/test.yaml @@ -0,0 +1,27 @@ +description: Both subgraph and connector HTTP client duration metrics +events: + - - context: + map: + "apollo_router::connector::info": + subgraph_name: users + 
source_name: user_api + http_method: GET + url_template: "/users" + - http_request: + uri: "/users" + method: GET + - http_response: + status: 200 + headers: + x-ratelimit-remaining: 999 + body: | + { "username": "foo" } + - http_request: + uri: "/users" + method: GET + - http_response: + status: 200 + headers: + x-ratelimit-remaining: 500 + body: | + { "username": "foo" } \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/metrics.snap new file mode 100644 index 0000000000..db1d610440 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/metrics.snap @@ -0,0 +1,42 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/instruments.rs +description: Connector HTTP client duration metric +expression: "&metrics.all()" +info: + telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + http.client.request.duration: + attributes: + subgraph.name: true + connector.source: + connector_source: name + connector.http.method: true + connector.url.template: true + custom.request.header.attribute: + connector_http_request_header: custom_request_header + custom.response.header.attribute: + connector_http_response_header: custom_response_header + custom.response.status.attribute: + connector_http_response_status: code + custom.static.attribute: + static: custom_value +--- +- name: http.client.request.duration + description: Duration of HTTP client requests. 
+ unit: s + data: + datapoints: + - sum: 0.1 + count: 1 + attributes: + connector.http.method: GET + connector.source: posts_api + connector.url.template: /posts + custom.request.header.attribute: custom_request_header_value + custom.response.header.attribute: custom_response_header_value + custom.response.status.attribute: 200 + custom.static.attribute: custom_value + subgraph.name: posts diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/router.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/router.yaml new file mode 100644 index 0000000000..276dfe9e6d --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/router.yaml @@ -0,0 +1,20 @@ +telemetry: + instrumentation: + instruments: + default_requirement_level: none + connector: + http.client.request.duration: + attributes: + subgraph.name: true + connector.source: + connector_source: name + connector.http.method: true + connector.url.template: true + custom.request.header.attribute: + connector_http_request_header: "custom_request_header" + custom.response.header.attribute: + connector_http_response_header: "custom_response_header" + custom.response.status.attribute: + connector_http_response_status: code + custom.static.attribute: + static: "custom_value" \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/test.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/test.yaml new file mode 100644 index 0000000000..4673c57c64 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/http_client_request_duration/test.yaml @@ -0,0 +1,20 @@ +description: Connector HTTP client duration metric +events: + - - context: + map: + "apollo_router::connector::info": + subgraph_name: posts + source_name: 
posts_api + http_method: GET + url_template: "/posts" + - http_request: + uri: "/posts" + method: GET + headers: + custom_request_header: custom_request_header_value + - http_response: + status: 200 + body: | + { "foo": "bar" } + headers: + custom_response_header: custom_response_header_value diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/metrics.snap new file mode 100644 index 0000000000..bbf1d2ad82 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/metrics.snap @@ -0,0 +1,31 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/instruments.rs +description: Both subgraph and connector HTTP client duration metrics +expression: "&metrics.all()" +info: + telemetry: + instrumentation: + instruments: + default_requirement_level: none + subgraph: + http.client.request.duration: + attributes: + subgraph.name: true + connector: + http.client.request.duration: + attributes: + subgraph.name: true +--- +- name: http.client.request.duration + description: Duration of HTTP client requests. 
+ unit: s + data: + datapoints: + - sum: 0.1 + count: 1 + attributes: + subgraph.name: products + - sum: 0.1 + count: 1 + attributes: + subgraph.name: reviews diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/router.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/router.yaml new file mode 100644 index 0000000000..054f4931aa --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/router.yaml @@ -0,0 +1,12 @@ +telemetry: + instrumentation: + instruments: + default_requirement_level: none + subgraph: + http.client.request.duration: + attributes: + subgraph.name: true + connector: + http.client.request.duration: + attributes: + subgraph.name: true \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/test.yaml b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/test.yaml new file mode 100644 index 0000000000..62c554512a --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/connector/subgraph_and_connector/test.yaml @@ -0,0 +1,42 @@ +description: Both subgraph and connector HTTP client duration metrics +events: + - - router_request: + uri: "/hello" + method: GET + body: | + hello + - supergraph_request: + uri: "/hello" + method: GET + query: "query { hello }" + - subgraph_request: + query: "query { hello }" + operation_name: "Products" + operation_kind: query + subgraph_name: "products" + - subgraph_response: + status: 200 + data: + hello: "world" + - context: + map: + "apollo_router::connector::info": + subgraph_name: reviews + source_name: reviews_api + http_method: GET + url_template: "/reviews" + - http_request: + uri: "/reviews" + method: GET + - http_response: + status: 200 + body: | + { "foo": "bar" } + - supergraph_response: + status: 200 + data: + hello: "world" + - 
router_response: + body: | + hello + status: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram/metrics.snap index 0b139624ac..bc8edad7c7 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram/metrics.snap @@ -12,8 +12,7 @@ info: type: histogram unit: unit value: - field_custom: - list_length: value + list_length: value --- - name: custom_counter description: count of requests @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 5 + count: 2 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_attributes/metrics.snap index aec235fbd7..c7254d9c48 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_attributes/metrics.snap @@ -25,12 +25,14 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: name graphql.field.name: name graphql.field.type: String graphql.type.name: Product - sum: 1 + count: 1 attributes: custom_attribute: products graphql.field.name: products diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_conditions/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_conditions/metrics.snap index a495657612..db096af812 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_conditions/metrics.snap +++ 
b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/custom_histogram_with_custom_conditions/metrics.snap @@ -25,5 +25,6 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: graphql.field.name: products diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/field.length/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/field.length/metrics.snap index d3e0270014..6cff0b5d6c 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/field.length/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/graphql/field.length/metrics.snap @@ -15,4 +15,5 @@ info: data: datapoints: - sum: 3 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.error.type/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.error.type/metrics.snap index f32638093b..07378dd323 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.error.type/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.error.type/metrics.snap @@ -18,5 +18,6 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: error.type: Internal Server Error diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.on_graphql_error/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.on_graphql_error/metrics.snap index 6af70de078..3190810cc8 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.on_graphql_error/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/attribute.on_graphql_error/metrics.snap @@ -19,5 +19,6 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: on.graphql.error: true diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_custom_value/metrics.snap 
b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_custom_value/metrics.snap index 957fec0e25..165bc54edd 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_custom_value/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_custom_value/metrics.snap @@ -22,4 +22,5 @@ info: data: datapoints: - sum: 10 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration/metrics.snap index 815e578506..c5b3045a46 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration/metrics.snap @@ -21,4 +21,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request/metrics.snap index a01097bae5..8e18798177 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request/metrics.snap @@ -21,4 +21,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request_with_condition_on_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request_with_condition_on_request/metrics.snap index 54cb6228f9..f11de25a03 100644 --- 
a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request_with_condition_on_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_duration_aborted_request_with_condition_on_request/metrics.snap @@ -25,4 +25,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit/metrics.snap index 5d4f3f4e4e..642610641d 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit/metrics.snap @@ -21,4 +21,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request/metrics.snap index 5d4f3f4e4e..13a2628c14 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request/metrics.snap @@ -1,6 +1,6 @@ --- source: apollo-router/src/plugins/telemetry/config_new/instruments.rs -description: Custom histogram unit +description: "Custom histogram where router response doesn't happen. This should still increment the metric on Drop." 
expression: "&metrics.all()" info: telemetry: @@ -21,4 +21,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request_with_condition_on_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request_with_condition_on_request/metrics.snap index c32ce4a968..5b6a730710 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request_with_condition_on_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_unit_aborted_request_with_condition_on_request/metrics.snap @@ -25,4 +25,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_attributes/metrics.snap index 66de37a22b..513b748d0a 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_attributes/metrics.snap @@ -25,6 +25,7 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: custom_value http.request.method: GET diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_conditions/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_conditions/metrics.snap index 5377b7f289..2960a408be 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_conditions/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/custom_histogram_with_conditions/metrics.snap @@ -29,6 +29,7 @@ info: data: datapoints: - sum: 
1 + count: 1 attributes: custom_attribute: allowed http.request.method: GET diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size/metrics.snap index ced697f058..2518b56f59 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size/metrics.snap @@ -17,6 +17,7 @@ info: data: datapoints: - sum: 35 + count: 1 attributes: http.request.method: GET http.response.status_code: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size_with_custom_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size_with_custom_attributes/metrics.snap index ced697f058..2518b56f59 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size_with_custom_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.body.size_with_custom_attributes/metrics.snap @@ -17,6 +17,7 @@ info: data: datapoints: - sum: 35 + count: 1 attributes: http.request.method: GET http.response.status_code: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration/metrics.snap index 1cdde4fc8b..acb5a25f5c 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration/metrics.snap @@ -16,6 +16,7 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: http.request.method: GET 
http.response.status_code: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration_with_custom_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration_with_custom_attributes/metrics.snap index c586792480..4ee5036659 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration_with_custom_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.request.duration_with_custom_attributes/metrics.snap @@ -23,6 +23,7 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: graphql.operation.name: TestQuery http.response.status_code: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.response.body.size/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.response.body.size/metrics.snap index 18d1ce2f0a..245131f105 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.response.body.size/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/router/http.server.response.body.size/metrics.snap @@ -17,6 +17,7 @@ info: data: datapoints: - sum: 35 + count: 1 attributes: http.request.method: GET http.response.status_code: 200 diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/schema.json b/apollo-router/src/plugins/telemetry/config_new/fixtures/schema.json index efa2b5fa12..ecc1409f59 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/schema.json +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/schema.json @@ -443,6 +443,78 @@ } }, "additionalProperties": false + }, + { + "type": "object", + "required": [ + "http_request" + ], + "properties": { + "http_request": { + "type": "object", + "required": [ + "method", + "uri" + ], + "properties": { + "body": { + "type": [ + "string", + "null" + ] + }, + 
"headers": { + "default": {}, + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "method": { + "type": "string" + }, + "uri": { + "type": "string" + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false + }, + { + "type": "object", + "required": [ + "http_response" + ], + "properties": { + "http_response": { + "type": "object", + "required": [ + "body", + "status" + ], + "properties": { + "body": { + "type": "string" + }, + "headers": { + "default": {}, + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "status": { + "type": "integer", + "format": "uint16", + "minimum": 0.0 + } + }, + "additionalProperties": false + } + }, + "additionalProperties": false } ] }, diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_custom_value/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_custom_value/metrics.snap index 957fec0e25..c6df2c801b 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_custom_value/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_custom_value/metrics.snap @@ -6,15 +6,14 @@ info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + subgraph: custom.histogram: description: histogram of requests type: histogram unit: unit value: - request_header: count_header + subgraph_request_header: count_header --- - name: custom.histogram description: histogram of requests @@ -22,4 +21,5 @@ info: data: datapoints: - sum: 10 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration copy/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration copy/metrics.snap index 
ae36e59b7d..f4ffc8c504 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration copy/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration copy/metrics.snap @@ -19,5 +19,6 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: apollo_subgraph_name: products diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration/metrics.snap index 815e578506..7eff2b686b 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration/metrics.snap @@ -6,9 +6,8 @@ info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + subgraph: custom.histogram.duration: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration_aborted_request/metrics.snap index 815e578506..a76d8fa27b 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_duration_aborted_request/metrics.snap @@ -1,14 +1,13 @@ --- source: apollo-router/src/plugins/telemetry/config_new/instruments.rs -description: Custom histogram duration +description: "Custom histogram where subgraph response doesn't happen. This should still increment the metric on Drop." 
expression: "&metrics.all()" info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + subgraph: custom.histogram.duration: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit/metrics.snap index 5d4f3f4e4e..97055c3348 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit/metrics.snap @@ -6,9 +6,8 @@ info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + subgraph: custom.histogram: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit_aborted_request/metrics.snap index 5d4f3f4e4e..93633beaae 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_unit_aborted_request/metrics.snap @@ -1,14 +1,13 @@ --- source: apollo-router/src/plugins/telemetry/config_new/instruments.rs -description: Custom histogram unit +description: "Custom histogram where subgraph response doesn't happen. This should still increment the metric on Drop." 
expression: "&metrics.all()" info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + subgraph: custom.histogram: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_attributes/metrics.snap index a64336ff0e..7d7df87966 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_attributes/metrics.snap @@ -24,6 +24,7 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: custom_value subgraph.graphql.document: "query { hello }" diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_conditions/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_conditions/metrics.snap index 5d2531612b..dd45cb8f47 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_conditions/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/subgraph/custom_histogram_with_conditions/metrics.snap @@ -28,6 +28,7 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: allowed subgraph.graphql.document: "query { hello }" diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_custom_value/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_custom_value/metrics.snap index 957fec0e25..343a0aba2b 100644 --- 
a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_custom_value/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_custom_value/metrics.snap @@ -6,9 +6,8 @@ info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + supergraph: custom.histogram: description: histogram of requests type: histogram @@ -22,4 +21,5 @@ info: data: datapoints: - sum: 10 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration/metrics.snap index 815e578506..9793783c2e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration/metrics.snap @@ -6,9 +6,8 @@ info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + supergraph: custom.histogram.duration: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration_aborted_request/metrics.snap index 815e578506..9793783c2e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_duration_aborted_request/metrics.snap @@ -6,9 +6,8 @@ info: telemetry: 
instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + supergraph: custom.histogram.duration: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 0.1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_aliases/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_aliases/metrics.snap index da07e887b0..1a457a28c2 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_aliases/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_aliases/metrics.snap @@ -20,4 +20,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_depth/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_depth/metrics.snap index 8f2f035d01..9da9d7b79e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_depth/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_depth/metrics.snap @@ -20,4 +20,5 @@ info: data: datapoints: - sum: 2 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_height/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_height/metrics.snap index f5c1b7e5f3..69b6ae5878 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_height/metrics.snap +++ 
b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_height/metrics.snap @@ -20,4 +20,5 @@ info: data: datapoints: - sum: 3 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_root_fields/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_root_fields/metrics.snap index 330921cc09..4c2da50527 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_root_fields/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_supergraph_root_fields/metrics.snap @@ -20,4 +20,5 @@ info: data: datapoints: - sum: 4 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit/metrics.snap index 5d4f3f4e4e..60f1ceae6e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit/metrics.snap @@ -1,14 +1,13 @@ --- source: apollo-router/src/plugins/telemetry/config_new/instruments.rs -description: Custom histogram unit +description: "Custom histogram where supergraph response doesn't happen. This should still increment the metric on Drop." 
expression: "&metrics.all()" info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + supergraph: custom.histogram: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit_aborted_request/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit_aborted_request/metrics.snap index 5d4f3f4e4e..60f1ceae6e 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit_aborted_request/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_unit_aborted_request/metrics.snap @@ -1,14 +1,13 @@ --- source: apollo-router/src/plugins/telemetry/config_new/instruments.rs -description: Custom histogram unit +description: "Custom histogram where supergraph response doesn't happen. This should still increment the metric on Drop." 
expression: "&metrics.all()" info: telemetry: instrumentation: instruments: - router: - http.server.active_requests: false - http.server.request.duration: false + default_requirement_level: none + supergraph: custom.histogram: description: histogram of requests type: histogram @@ -21,4 +20,5 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: {} diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_attributes/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_attributes/metrics.snap index 14b31dba79..f202af208d 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_attributes/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_attributes/metrics.snap @@ -24,6 +24,7 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: custom_value graphql.document: "query { hello }" diff --git a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_conditions/metrics.snap b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_conditions/metrics.snap index 6afdc30d2f..28485b9341 100644 --- a/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_conditions/metrics.snap +++ b/apollo-router/src/plugins/telemetry/config_new/fixtures/supergraph/custom_histogram_with_conditions/metrics.snap @@ -28,6 +28,7 @@ info: data: datapoints: - sum: 1 + count: 1 attributes: custom_attribute: allowed graphql.document: "query { hello }" diff --git a/apollo-router/src/plugins/telemetry/config_new/instruments.rs b/apollo-router/src/plugins/telemetry/config_new/instruments.rs index 6dad942576..dbaf2477ca 100644 --- a/apollo-router/src/plugins/telemetry/config_new/instruments.rs +++ b/apollo-router/src/plugins/telemetry/config_new/instruments.rs @@ -38,6 +38,11 @@ use 
crate::plugins::telemetry::config_new::attributes::RouterAttributes; use crate::plugins::telemetry::config_new::attributes::SubgraphAttributes; use crate::plugins::telemetry::config_new::attributes::SupergraphAttributes; use crate::plugins::telemetry::config_new::conditions::Condition; +use crate::plugins::telemetry::config_new::connector::attributes::ConnectorAttributes; +use crate::plugins::telemetry::config_new::connector::instruments::ConnectorInstruments; +use crate::plugins::telemetry::config_new::connector::instruments::ConnectorInstrumentsConfig; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorSelector; +use crate::plugins::telemetry::config_new::connector::selectors::ConnectorValue; use crate::plugins::telemetry::config_new::cost::CostInstruments; use crate::plugins::telemetry::config_new::cost::CostInstrumentsConfig; use crate::plugins::telemetry::config_new::extendable::Extendable; @@ -81,6 +86,11 @@ pub(crate) struct InstrumentsConfig { SubgraphInstrumentsConfig, Instrument, >, + /// Connector service instruments. + pub(crate) connector: Extendable< + ConnectorInstrumentsConfig, + Instrument, + >, /// GraphQL response field instruments. 
pub(crate) graphql: Extendable< GraphQLInstrumentsConfig, @@ -98,9 +108,9 @@ const HTTP_SERVER_REQUEST_BODY_SIZE_METRIC: &str = "http.server.request.body.siz const HTTP_SERVER_RESPONSE_BODY_SIZE_METRIC: &str = "http.server.response.body.size"; const HTTP_SERVER_ACTIVE_REQUESTS: &str = "http.server.active_requests"; -const HTTP_CLIENT_REQUEST_DURATION_METRIC: &str = "http.client.request.duration"; -const HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC: &str = "http.client.request.body.size"; -const HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC: &str = "http.client.response.body.size"; +pub(super) const HTTP_CLIENT_REQUEST_DURATION_METRIC: &str = "http.client.request.duration"; +pub(super) const HTTP_CLIENT_REQUEST_BODY_SIZE_METRIC: &str = "http.client.request.body.size"; +pub(super) const HTTP_CLIENT_RESPONSE_BODY_SIZE_METRIC: &str = "http.client.response.body.size"; impl InstrumentsConfig { pub(crate) fn validate(&self) -> Result<(), String> { @@ -129,6 +139,11 @@ impl InstrumentsConfig { format!("error for custom cache instrument {name:?} in condition: {err}") })?; } + for (name, custom) in &self.connector.custom { + custom.condition.validate(None).map_err(|err| { + format!("error for custom connector instrument {name:?} in condition: {err}") + })?; + } Ok(()) } @@ -144,6 +159,8 @@ impl InstrumentsConfig { .defaults_for_levels(self.default_requirement_level, TelemetryDataKind::Metrics); self.graphql .defaults_for_levels(self.default_requirement_level, TelemetryDataKind::Metrics); + self.connector + .defaults_for_levels(self.default_requirement_level, TelemetryDataKind::Metrics); } pub(crate) fn new_builtin_router_instruments(&self) -> HashMap { @@ -691,6 +708,49 @@ impl InstrumentsConfig { } } + pub(crate) fn new_connector_instruments( + &self, + static_instruments: Arc>, + ) -> ConnectorInstruments { + ConnectorInstruments::new(&self.connector, static_instruments) + } + + pub(crate) fn new_builtin_connector_instruments(&self) -> HashMap { + let meter = 
metrics::meter_provider().meter(METER_NAME); + let mut static_instruments = ConnectorInstruments::new_builtin(&self.connector); + + for (instrument_name, instrument) in &self.connector.custom { + match instrument.ty { + InstrumentType::Counter => { + static_instruments.insert( + instrument_name.clone(), + StaticInstrument::CounterF64( + meter + .f64_counter(instrument_name.clone()) + .with_description(instrument.description.clone()) + .with_unit(Unit::new(instrument.unit.clone())) + .init(), + ), + ); + } + InstrumentType::Histogram => { + static_instruments.insert( + instrument_name.clone(), + StaticInstrument::Histogram( + meter + .f64_histogram(instrument_name.clone()) + .with_description(instrument.description.clone()) + .with_unit(Unit::new(instrument.unit.clone())) + .init(), + ), + ); + } + } + } + + static_instruments + } + pub(crate) fn new_builtin_graphql_instruments(&self) -> HashMap { let meter = metrics::meter_provider().meter(METER_NAME); let mut static_instruments = HashMap::with_capacity(self.graphql.custom.len()); @@ -2570,6 +2630,7 @@ mod tests { use http::StatusCode; use http::Uri; use multimap::MultiMap; + use router::body; use rust_embed::RustEmbed; use schemars::gen::SchemaGenerator; use serde::Deserialize; @@ -2594,6 +2655,8 @@ mod tests { use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_DEPTH; use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_HEIGHT; use crate::plugins::telemetry::APOLLO_PRIVATE_QUERY_ROOT_FIELDS; + use crate::services::http::HttpRequest; + use crate::services::http::HttpResponse; use crate::services::OperationKind; use crate::services::RouterRequest; use crate::services::RouterResponse; @@ -2703,6 +2766,19 @@ mod tests { ResponseField { typed_value: TypedValueMirror, }, + HttpRequest { + method: String, + uri: String, + #[serde(default)] + headers: HashMap, + body: Option, + }, + HttpResponse { + status: u16, + #[serde(default)] + headers: HashMap, + body: String, + }, } #[derive(Deserialize, JsonSchema)] @@ -2887,6 
+2963,7 @@ mod tests { let mut router_instruments = None; let mut supergraph_instruments = None; let mut subgraph_instruments = None; + let mut connector_instruments = None; let mut cache_instruments: Option = None; let graphql_instruments: GraphQLInstruments = config .new_graphql_instruments(Arc::new( @@ -2906,7 +2983,7 @@ mod tests { .method(Method::from_str(&method).expect("method")) .uri(Uri::from_str(&uri).expect("uri")) .headers(convert_headers(headers)) - .body(body) + .body(router::body::from_bytes(body)) .build() .unwrap(); router_instruments = Some(config.new_router_instruments( @@ -3127,6 +3204,50 @@ mod tests { } } } + Event::HttpRequest { + method, + uri, + headers, + body, + } => { + let mut http_request = http::Request::builder() + .method(Method::from_str(&method).expect("method")) + .uri(Uri::from_str(&uri).expect("uri")) + .body(body.map(body::from_bytes).unwrap_or(body::empty())) + .unwrap(); + *http_request.headers_mut() = convert_http_headers(headers); + let request = HttpRequest { + http_request, + context: context.clone(), + }; + connector_instruments = Some({ + let connector_instruments = config + .new_connector_instruments(Arc::new( + config.new_builtin_connector_instruments(), + )); + connector_instruments.on_request(&request); + connector_instruments + }); + } + Event::HttpResponse { + status, + headers, + body, + } => { + let mut http_response = http::Response::builder() + .status(StatusCode::from_u16(status).expect("status")) + .body(router::body::from_bytes(body)) + .unwrap(); + *http_response.headers_mut() = convert_http_headers(headers); + let response = HttpResponse { + http_response, + context: context.clone(), + }; + connector_instruments + .take() + .expect("http request must have been made first") + .on_response(&response); + } } } } diff --git a/apollo-router/src/plugins/telemetry/config_new/logging.rs b/apollo-router/src/plugins/telemetry/config_new/logging.rs index db2dda6588..307f1bef06 100644 --- 
a/apollo-router/src/plugins/telemetry/config_new/logging.rs +++ b/apollo-router/src/plugins/telemetry/config_new/logging.rs @@ -17,12 +17,9 @@ use serde::de::Visitor; use serde::Deserialize; use serde::Deserializer; -use crate::configuration::ConfigurationError; use crate::plugins::telemetry::config::AttributeValue; use crate::plugins::telemetry::config::TraceIdFormat; -use crate::plugins::telemetry::config_new::experimental_when_header::HeaderLoggingCondition; use crate::plugins::telemetry::resource::ConfigResource; -use crate::services::SupergraphRequest; /// Logging configuration. #[derive(Deserialize, JsonSchema, Clone, Default, Debug)] @@ -35,44 +32,6 @@ pub(crate) struct Logging { #[serde(skip)] /// Settings for logging to a file. pub(crate) file: File, - - /// Log configuration to log request and response for subgraphs and supergraph - /// Note that this will be removed when events are implemented. - #[serde(rename = "experimental_when_header")] - pub(crate) when_header: Vec, -} - -impl Logging { - pub(crate) fn validate(&self) -> Result<(), ConfigurationError> { - let misconfiguration = self.when_header.iter().any(|cfg| match cfg { - HeaderLoggingCondition::Matching { headers, body, .. } - | HeaderLoggingCondition::Value { headers, body, .. 
} => !body && !headers, - }); - - if misconfiguration { - Err(ConfigurationError::InvalidConfiguration { - message: "'experimental_when_header' configuration for logging is invalid", - error: String::from( - "body and headers must not be both false because it doesn't enable any logs", - ), - }) - } else { - Ok(()) - } - } - - /// Returns if we should display the request/response headers and body given the `SupergraphRequest` - pub(crate) fn should_log(&self, req: &SupergraphRequest) -> (bool, bool) { - self.when_header - .iter() - .fold((false, false), |(log_headers, log_body), current| { - let (current_log_headers, current_log_body) = current.should_log(req); - ( - log_headers || current_log_headers, - log_body || current_log_body, - ) - }) - } } #[derive(Clone, Debug, Deserialize, JsonSchema, Default)] @@ -470,13 +429,10 @@ pub(crate) enum Rollover { #[cfg(test)] mod test { - use regex::Regex; use serde_json::json; - use crate::plugins::telemetry::config_new::experimental_when_header::HeaderLoggingCondition; use crate::plugins::telemetry::config_new::logging::Format; - use crate::plugins::telemetry::config_new::logging::Logging; - use crate::services::SupergraphRequest; + #[test] fn format_de() { let format = serde_json::from_value::(json!("text")).unwrap(); @@ -488,92 +444,4 @@ mod test { let format = serde_json::from_value::(json!({"json":{}})).unwrap(); assert_eq!(format, Format::Json(Default::default())); } - - #[test] - fn test_logging_conf_validation() { - let logging_conf = Logging { - when_header: vec![HeaderLoggingCondition::Value { - name: "test".to_string(), - value: String::new(), - headers: true, - body: false, - }], - ..Default::default() - }; - - logging_conf.validate().unwrap(); - - let logging_conf = Logging { - when_header: vec![HeaderLoggingCondition::Value { - name: "test".to_string(), - value: String::new(), - headers: false, - body: false, - }], - ..Default::default() - }; - - let validate_res = logging_conf.validate(); - 
assert!(validate_res.is_err()); - assert_eq!(validate_res.unwrap_err().to_string(), "'experimental_when_header' configuration for logging is invalid: body and headers must not be both false because it doesn't enable any logs"); - } - - #[test] - fn test_logging_conf_should_log() { - let logging_conf = Logging { - when_header: vec![HeaderLoggingCondition::Matching { - name: "test".to_string(), - matching: Regex::new("^foo*").unwrap(), - headers: true, - body: false, - }], - ..Default::default() - }; - let req = SupergraphRequest::fake_builder() - .header("test", "foobar") - .build() - .unwrap(); - assert_eq!(logging_conf.should_log(&req), (true, false)); - - let logging_conf = Logging { - when_header: vec![HeaderLoggingCondition::Value { - name: "test".to_string(), - value: String::from("foobar"), - headers: true, - body: false, - }], - ..Default::default() - }; - assert_eq!(logging_conf.should_log(&req), (true, false)); - - let logging_conf = Logging { - when_header: vec![ - HeaderLoggingCondition::Matching { - name: "test".to_string(), - matching: Regex::new("^foo*").unwrap(), - headers: true, - body: false, - }, - HeaderLoggingCondition::Matching { - name: "test".to_string(), - matching: Regex::new("^*bar$").unwrap(), - headers: false, - body: true, - }, - ], - ..Default::default() - }; - assert_eq!(logging_conf.should_log(&req), (true, true)); - - let logging_conf = Logging { - when_header: vec![HeaderLoggingCondition::Matching { - name: "testtest".to_string(), - matching: Regex::new("^foo*").unwrap(), - headers: true, - body: false, - }], - ..Default::default() - }; - assert_eq!(logging_conf.should_log(&req), (false, false)); - } } diff --git a/apollo-router/src/plugins/telemetry/config_new/mod.rs b/apollo-router/src/plugins/telemetry/config_new/mod.rs index d619dafcaf..8551d8bcad 100644 --- a/apollo-router/src/plugins/telemetry/config_new/mod.rs +++ b/apollo-router/src/plugins/telemetry/config_new/mod.rs @@ -20,9 +20,9 @@ pub(crate) mod conditions; pub(crate) 
mod cache; mod conditional; +pub(crate) mod connector; pub(crate) mod cost; pub(crate) mod events; -mod experimental_when_header; pub(crate) mod extendable; pub(crate) mod graphql; pub(crate) mod instruments; diff --git a/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_request@logs.snap b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_request@logs.snap new file mode 100644 index 0000000000..0a8dbbea06 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_request@logs.snap @@ -0,0 +1,12 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/events.rs +expression: yaml +--- +- fields: + kind: connector.request + level: INFO + message: "" +- fields: + kind: my.request.event + level: INFO + message: my request event message diff --git a/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_response@logs.snap b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_response@logs.snap new file mode 100644 index 0000000000..3ff31c6764 --- /dev/null +++ b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__connector_events_response@logs.snap @@ -0,0 +1,12 @@ +--- +source: apollo-router/src/plugins/telemetry/config_new/events.rs +expression: yaml +--- +- fields: + kind: connector.response + level: WARN + message: "" +- fields: + kind: my.response.event + level: ERROR + message: my response event message diff --git a/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__router_events@logs.snap 
b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__router_events@logs.snap index d06b607b54..6bd8e9677f 100644 --- a/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__router_events@logs.snap +++ b/apollo-router/src/plugins/telemetry/config_new/snapshots/apollo_router__plugins__telemetry__config_new__events__tests__router_events@logs.snap @@ -2,26 +2,6 @@ source: apollo-router/src/plugins/telemetry/config_new/events.rs expression: yaml --- -- fields: - kind: router.request - level: INFO - message: "" - span: - http.flavor: HTTP/1.1 - http.method: GET - http.request.method: GET - http.route: "http://example.com/" - name: router - otel.kind: INTERNAL - trace_id: "" - spans: - - http.flavor: HTTP/1.1 - http.method: GET - http.request.method: GET - http.route: "http://example.com/" - name: router - otel.kind: INTERNAL - trace_id: "" - fields: kind: my.request_event level: INFO diff --git a/apollo-router/src/plugins/telemetry/config_new/spans.rs b/apollo-router/src/plugins/telemetry/config_new/spans.rs index 0abf12b797..20436480d9 100644 --- a/apollo-router/src/plugins/telemetry/config_new/spans.rs +++ b/apollo-router/src/plugins/telemetry/config_new/spans.rs @@ -2,6 +2,7 @@ use schemars::JsonSchema; use serde::Deserialize; use super::conditional::Conditional; +use super::connector::spans::ConnectorSpans; use crate::plugins::telemetry::config_new::attributes::DefaultAttributeRequirementLevel; use crate::plugins::telemetry::config_new::attributes::RouterAttributes; use crate::plugins::telemetry::config_new::attributes::SubgraphAttributes; @@ -35,6 +36,10 @@ pub(crate) struct Spans { /// Attributes to include on the subgraph span. /// Subgraph spans contain information about the subgraph request and response and therefore contain subgraph specific attributes. 
pub(crate) subgraph: SubgraphSpans, + + /// Attributes to include on the connector span. + /// Connector spans contain information about the connector request and response and therefore contain connector specific attributes. + pub(crate) connector: ConnectorSpans, } impl Spans { diff --git a/apollo-router/src/plugins/telemetry/consts.rs b/apollo-router/src/plugins/telemetry/consts.rs index c82d7b202b..dbf34f4532 100644 --- a/apollo-router/src/plugins/telemetry/consts.rs +++ b/apollo-router/src/plugins/telemetry/consts.rs @@ -19,6 +19,7 @@ pub(crate) const QUERY_PLANNING_SPAN_NAME: &str = "query_planning"; pub(crate) const HTTP_REQUEST_SPAN_NAME: &str = "http_request"; pub(crate) const SUBGRAPH_REQUEST_SPAN_NAME: &str = "subgraph_request"; pub(crate) const QUERY_PARSING_SPAN_NAME: &str = "parse_query"; +pub(crate) const CONNECT_SPAN_NAME: &str = "connect"; pub(crate) const BUILT_IN_SPAN_NAMES: [&str; 9] = [ REQUEST_SPAN_NAME, diff --git a/apollo-router/src/plugins/telemetry/dynamic_attribute.rs b/apollo-router/src/plugins/telemetry/dynamic_attribute.rs index d9cde3ca21..8b893f1add 100644 --- a/apollo-router/src/plugins/telemetry/dynamic_attribute.rs +++ b/apollo-router/src/plugins/telemetry/dynamic_attribute.rs @@ -10,6 +10,7 @@ use super::consts::OTEL_KIND; use super::consts::OTEL_NAME; use super::consts::OTEL_STATUS_CODE; use super::consts::OTEL_STATUS_MESSAGE; +use super::formatters::APOLLO_CONNECTOR_PREFIX; use super::formatters::APOLLO_PRIVATE_PREFIX; use super::otel::layer::str_to_span_kind; use super::otel::layer::str_to_status; @@ -69,6 +70,11 @@ impl DynAttributeLayer { pub(crate) trait SpanDynAttribute { fn set_span_dyn_attribute(&self, key: Key, value: opentelemetry::Value); fn set_span_dyn_attributes(&self, attributes: impl IntoIterator); + fn set_span_dyn_attributes_for_span_name( + &self, + span_name: &str, + attributes: impl IntoIterator, + ); } impl SpanDynAttribute for ::tracing::Span { @@ -101,7 +107,7 @@ impl SpanDynAttribute for ::tracing::Span { 
} } } else { - if key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) { + if key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) || key.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) { return; } let mut extensions = s.extensions_mut(); @@ -170,7 +176,98 @@ impl SpanDynAttribute for ::tracing::Span { } } else { let mut attributes = attributes - .filter(|kv| !kv.key.as_str().starts_with(APOLLO_PRIVATE_PREFIX)) + .filter(|kv| { + !kv.key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) + && !kv.key.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) + }) + .peekable(); + if attributes.peek().is_none() { + return; + } + let mut extensions = s.extensions_mut(); + match extensions.get_mut::() { + Some(registered_attributes) => { + registered_attributes.extend(attributes); + } + None => { + // Can't use ::tracing::error! because it could create deadlock on extensions + eprintln!("no LogAttributes, this is a bug"); + } + } + } + } + }; + } else { + ::tracing::error!("no Registry, this is a bug"); + } + }); + } + + fn set_span_dyn_attributes_for_span_name( + &self, + span_name: &str, + attributes: impl IntoIterator, + ) { + let mut attributes = attributes.into_iter().peekable(); + if attributes.peek().is_none() { + return; + } + self.with_subscriber(move |(id, dispatch)| { + if let Some(reg) = dispatch.downcast_ref::() { + match reg.span(id) { + None => eprintln!("no spanref, this is a bug"), + Some(mut s) => { + if s.name() != span_name { + while let Some(parent_span) = s.parent() { + if parent_span.name() == span_name { + s = parent_span; + break; + } + s = parent_span; + } + } + + if s.is_sampled() { + let mut extensions = s.extensions_mut(); + match extensions.get_mut::() { + Some(otel_data) => { + if otel_data.builder.attributes.is_none() { + otel_data.builder.attributes = Some( + attributes + .inspect(|attr| { + update_otel_data( + otel_data, + &attr.key, + &attr.value, + ) + }) + .collect(), + ); + } else { + let attributes: Vec = attributes + .inspect(|attr| { + 
update_otel_data(otel_data, &attr.key, &attr.value) + }) + .collect(); + otel_data + .builder + .attributes + .as_mut() + .unwrap() + .extend(attributes); + } + } + None => { + // Can't use ::tracing::error! because it could create deadlock on extensions + eprintln!("no OtelData, this is a bug"); + } + } + } else { + let mut attributes = attributes + .filter(|kv| { + !kv.key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) + && !kv.key.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) + }) .peekable(); if attributes.peek().is_none() { return; @@ -265,7 +362,10 @@ impl EventDynAttribute for ::tracing::Span { } } else { let mut attributes = attributes - .filter(|kv| !kv.key.as_str().starts_with(APOLLO_PRIVATE_PREFIX)) + .filter(|kv| { + !kv.key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) + && !kv.key.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) + }) .peekable(); if attributes.peek().is_none() { return; diff --git a/apollo-router/src/plugins/telemetry/endpoint.rs b/apollo-router/src/plugins/telemetry/endpoint.rs index 0314aab904..38da42a53a 100644 --- a/apollo-router/src/plugins/telemetry/endpoint.rs +++ b/apollo-router/src/plugins/telemetry/endpoint.rs @@ -66,6 +66,58 @@ impl UriEndpoint { .expect("uri cannot be invalid as it was constructed from existing parts") }) } + + // Temp pending Otel upgrade + pub(crate) fn to_uri_0_2(&self, default_endpoint: &http_0_2::Uri) -> Option { + self.uri.as_ref().and_then(|uri| { + let mut parts = http_0_2::Uri::from_str(&uri.to_string()).ok()?.into_parts(); + if parts.scheme.is_none() { + parts.scheme = default_endpoint.scheme().cloned(); + } + + match (&parts.authority, default_endpoint.authority()) { + (None, Some(default_authority)) => { + parts.authority = Some(default_authority.clone()); + } + (Some(authority), Some(default_authority)) => { + let host = if authority.host().is_empty() { + default_authority.host() + } else { + authority.host() + }; + let port = if authority.port().is_none() { + default_authority.port() + } else { + 
authority.port() + }; + + if let Some(port) = port { + parts.authority = Some( + http_0_2::uri::Authority::from_str( + format!("{}:{}", host, port).as_str(), + ) + .expect("host and port must have come from a valid uri, qed"), + ) + } else { + parts.authority = Some( + http_0_2::uri::Authority::from_str(host) + .expect("host must have come from a valid uri, qed"), + ); + } + } + _ => {} + } + + if parts.path_and_query.is_none() { + parts.path_and_query = default_endpoint.path_and_query().cloned(); + } + + Some( + http_0_2::Uri::from_parts(parts) + .expect("uri cannot be invalid as it was constructed from existing parts"), + ) + }) + } } impl<'de> Deserialize<'de> for UriEndpoint { diff --git a/apollo-router/src/plugins/telemetry/fmt_layer.rs b/apollo-router/src/plugins/telemetry/fmt_layer.rs index c1909bb0ce..330453a4cf 100644 --- a/apollo-router/src/plugins/telemetry/fmt_layer.rs +++ b/apollo-router/src/plugins/telemetry/fmt_layer.rs @@ -264,8 +264,10 @@ mod tests { use std::sync::Mutex; use std::sync::MutexGuard; + use apollo_federation::sources::connect::HTTPMethod; use http::header::CONTENT_LENGTH; use http::HeaderValue; + use tests::events::RouterResponseBodyExtensionType; use tracing::error; use tracing::info; use tracing::info_span; @@ -283,7 +285,12 @@ mod tests { use crate::plugins::telemetry::config_new::logging::TextFormat; use crate::plugins::telemetry::dynamic_attribute::SpanDynAttribute; use crate::plugins::telemetry::otel; + use crate::services::connector_service::ConnectorInfo; + use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; + use crate::services::http::HttpRequest; + use crate::services::http::HttpResponse; use crate::services::router; + use crate::services::router::body; use crate::services::subgraph; use crate::services::supergraph; @@ -362,7 +369,41 @@ subgraph: subgraph_response_status: code "my.custom.attribute": subgraph_response_data: "$.*" - default: "missing""#; + default: "missing" + +connector: + # Standard events + 
request: info + response: warn + error: error + + # Custom events + my.connector.request.event: + message: "my request event message" + level: info + on: request + attributes: + subgraph.name: true + connector_source: + connector_source: name + http_method: + connector_http_method: true + url_template: + connector_url_template: true + my.connector.response.event: + message: "my response event message" + level: error + on: response + attributes: + subgraph.name: true + connector_source: + connector_source: name + http_method: + connector_http_method: true + url_template: + connector_url_template: true + response_status: + connector_http_response_status: code"#; #[derive(Default, Clone)] struct LogBuffer(Arc>>); @@ -787,6 +828,37 @@ subgraph: .build() .expect("expecting valid response"); subgraph_events.on_response(&subgraph_resp); + + let connector_info = ConnectorInfo { + subgraph_name: "connector_subgraph".to_string(), + source_name: Some("source".to_string()), + http_method: HTTPMethod::Get.as_str().to_string(), + url_template: "/test".to_string(), + }; + let context = crate::Context::default(); + context + .insert(CONNECTOR_INFO_CONTEXT_KEY, connector_info) + .unwrap(); + let mut http_request = http::Request::builder().body(body::empty()).unwrap(); + http_request + .headers_mut() + .insert("x-log-request", HeaderValue::from_static("log")); + let http_request = HttpRequest { + http_request, + context, + }; + let connector_events = event_config.new_connector_events(); + connector_events.on_request(&http_request); + + let http_response = HttpResponse { + http_response: http::Response::builder() + .status(200) + .header("x-log-response", HeaderValue::from_static("log")) + .body(body::empty()) + .expect("expecting valid response"), + context: Default::default(), + }; + connector_events.on_response(&http_response); }, ); @@ -853,12 +925,18 @@ subgraph: .build() .unwrap(); router_events.on_request(&router_req); - + let ctx = crate::Context::new(); + 
ctx.extensions().with_lock(|mut ext| { + ext.insert(RouterResponseBodyExtensionType( + r#"{"data": {"data": "res"}}"#.to_string(), + )); + }); let router_resp = router::Response::fake_builder() .header("custom-header", "val1") .header(CONTENT_LENGTH, "25") .header("x-log-request", HeaderValue::from_static("log")) .data(serde_json_bytes::json!({"data": "res"})) + .context(ctx) .build() .expect("expecting valid response"); router_events.on_response(&router_resp); @@ -926,6 +1004,37 @@ subgraph: .build() .expect("expecting valid response"); subgraph_events.on_response(&subgraph_resp); + + let connector_info = ConnectorInfo { + subgraph_name: "connector_subgraph".to_string(), + source_name: Some("source".to_string()), + http_method: HTTPMethod::Get.as_str().to_string(), + url_template: "/test".to_string(), + }; + let context = crate::Context::default(); + context + .insert(CONNECTOR_INFO_CONTEXT_KEY, connector_info) + .unwrap(); + let mut http_request = http::Request::builder().body(body::empty()).unwrap(); + http_request + .headers_mut() + .insert("x-log-request", HeaderValue::from_static("log")); + let http_request = HttpRequest { + http_request, + context, + }; + let connector_events = event_config.new_connector_events(); + connector_events.on_request(&http_request); + + let http_response = HttpResponse { + http_response: http::Response::builder() + .status(200) + .header("x-log-response", HeaderValue::from_static("log")) + .body(body::empty()) + .expect("expecting valid response"), + context: Default::default(), + }; + connector_events.on_response(&http_response); }, ); diff --git a/apollo-router/src/plugins/telemetry/formatters/json.rs b/apollo-router/src/plugins/telemetry/formatters/json.rs index 7062cf6254..ad7755555b 100644 --- a/apollo-router/src/plugins/telemetry/formatters/json.rs +++ b/apollo-router/src/plugins/telemetry/formatters/json.rs @@ -20,6 +20,7 @@ use tracing_subscriber::registry::SpanRef; use super::get_trace_and_span_id; use 
super::EventFormatter; +use super::APOLLO_CONNECTOR_PREFIX; use super::APOLLO_PRIVATE_PREFIX; use super::EXCLUDED_ATTRIBUTES; use crate::plugins::telemetry::config::AttributeValue; @@ -128,7 +129,9 @@ where if let Some(otel_attributes) = otel_attributes { for (key, value) in otel_attributes.iter().filter(|(key, _)| { let key_name = key.as_str(); - !key_name.starts_with(APOLLO_PRIVATE_PREFIX) && !self.1.contains(&key_name) + !key_name.starts_with(APOLLO_PRIVATE_PREFIX) + && !key_name.starts_with(APOLLO_CONNECTOR_PREFIX) + && !self.1.contains(&key_name) }) { serializer.serialize_entry(key.as_str(), &value.as_str())?; } @@ -147,7 +150,9 @@ where }; for kv in custom_attributes.iter().filter(|kv| { let key_name = kv.key.as_str(); - !key_name.starts_with(APOLLO_PRIVATE_PREFIX) && !self.1.contains(&key_name) + !key_name.starts_with(APOLLO_PRIVATE_PREFIX) + && !key_name.starts_with(APOLLO_CONNECTOR_PREFIX) + && !self.1.contains(&key_name) }) { match &kv.value { Value::Bool(value) => { @@ -403,6 +408,7 @@ where .filter(|(key, _)| { let key_name = key.as_str(); !key_name.starts_with(APOLLO_PRIVATE_PREFIX) + && !key_name.starts_with(APOLLO_CONNECTOR_PREFIX) && include_attributes.contains(key_name) }) .map(|(key, val)| (key.clone(), val.clone())), @@ -427,6 +433,7 @@ where .filter(|kv| { let key_name = kv.key.as_str(); !key_name.starts_with(APOLLO_PRIVATE_PREFIX) + && !key_name.starts_with(APOLLO_CONNECTOR_PREFIX) && include_attributes.contains(key_name) }) .map(|kv| (kv.key.clone(), kv.value.clone())), diff --git a/apollo-router/src/plugins/telemetry/formatters/mod.rs b/apollo-router/src/plugins/telemetry/formatters/mod.rs index f99bc6a91c..17a4abb5c2 100644 --- a/apollo-router/src/plugins/telemetry/formatters/mod.rs +++ b/apollo-router/src/plugins/telemetry/formatters/mod.rs @@ -32,6 +32,9 @@ use crate::metrics::layer::METRIC_PREFIX_VALUE; use crate::plugins::telemetry::otel::OtelData; pub(crate) const APOLLO_PRIVATE_PREFIX: &str = "apollo_private."; +// FIXME: this is a 
temporary solution to avoid exposing hardcoded attributes in connector spans instead of using the custom telemetry features. +// The reason this is introduced right now is to directly avoid people relying on these attributes and then creating a breaking change in the future. +pub(crate) const APOLLO_CONNECTOR_PREFIX: &str = "apollo.connector."; // This list comes from Otel https://opentelemetry.io/docs/specs/semconv/attributes-registry/code/ and pub(crate) const EXCLUDED_ATTRIBUTES: [&str; 5] = [ "code.filepath", diff --git a/apollo-router/src/plugins/telemetry/logging/mod.rs b/apollo-router/src/plugins/telemetry/logging/mod.rs index 83cf680c9b..55777ea9ba 100644 --- a/apollo-router/src/plugins/telemetry/logging/mod.rs +++ b/apollo-router/src/plugins/telemetry/logging/mod.rs @@ -19,7 +19,7 @@ mod test { let mut response = test_harness .call_router( router::Request::fake_builder() - .body("query { foo }") + .body(router::body::from_bytes("query { foo }")) .build() .expect("expecting valid request"), |_r| async { @@ -100,41 +100,4 @@ mod test { .with_subscriber(assert_snapshot_subscriber!()) .await } - - #[tokio::test(flavor = "multi_thread")] - async fn test_when_header() { - let test_harness: PluginTestHarness = PluginTestHarness::builder() - .config(include_str!( - "testdata/experimental_when_header.router.yaml" - )) - .build() - .await; - - async { - let mut response = test_harness - .call_supergraph( - supergraph::Request::fake_builder() - .header("custom-header1", "val1") - .header("custom-header2", "val2") - .query("query { foo }") - .build() - .expect("expecting valid request"), - |_r| { - tracing::info!("response"); - supergraph::Response::fake_builder() - .header("custom-header1", "val1") - .header("custom-header2", "val2") - .data(serde_json::json!({"data": "res"})) - .build() - .expect("expecting valid response") - }, - ) - .await - .expect("expecting successful response"); - - response.next_response().await; - } - 
.with_subscriber(assert_snapshot_subscriber!()) - .await - } } diff --git a/apollo-router/src/plugins/telemetry/logging/snapshots/apollo_router__plugins__telemetry__logging__test__when_header@logs.snap b/apollo-router/src/plugins/telemetry/logging/snapshots/apollo_router__plugins__telemetry__logging__test__when_header@logs.snap deleted file mode 100644 index 22bcfd4c1f..0000000000 --- a/apollo-router/src/plugins/telemetry/logging/snapshots/apollo_router__plugins__telemetry__logging__test__when_header@logs.snap +++ /dev/null @@ -1,71 +0,0 @@ ---- -source: apollo-router/src/plugins/telemetry/logging/mod.rs -expression: yaml ---- -- fields: - http.request.headers: "{\"content-type\": \"application/json\", \"custom-header1\": \"val1\", \"custom-header2\": \"val2\"}" - level: INFO - message: Supergraph request headers - span: - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL - spans: - - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL -- fields: - http.request.body: "Request { query: Some(\"query { foo }\"), operation_name: None, variables: {}, extensions: {} }" - level: INFO - message: Supergraph request body - span: - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL - spans: - - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL -- fields: {} - level: INFO - message: response - span: - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL - spans: - - 
apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL -- fields: - http.response.headers: "{\"custom-header1\": \"val1\", \"custom-header2\": \"val2\"}" - level: INFO - message: Supergraph response headers - span: - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL - spans: - - apollo_private.field_level_instrumentation_ratio: 0.01 - apollo_private.graphql.variables: "{}" - graphql.document: "query { foo }" - name: supergraph - otel.kind: INTERNAL -- fields: - http.response.body: "Response { label: None, data: Some(Object({\"data\": String(\"res\")})), path: None, errors: [], extensions: {}, has_next: None, subscribed: None, created_at: None, incremental: [] }" - level: INFO - message: Supergraph GraphQL response diff --git a/apollo-router/src/plugins/telemetry/logging/testdata/experimental_when_header.router.yaml b/apollo-router/src/plugins/telemetry/logging/testdata/experimental_when_header.router.yaml deleted file mode 100644 index 55c0431beb..0000000000 --- a/apollo-router/src/plugins/telemetry/logging/testdata/experimental_when_header.router.yaml +++ /dev/null @@ -1,9 +0,0 @@ -telemetry: - exporters: - logging: - experimental_when_header: - - name: "custom-header1" - match: "^val.*" - headers: true - body: true - diff --git a/apollo-router/src/plugins/telemetry/metrics/apollo/mod.rs b/apollo-router/src/plugins/telemetry/metrics/apollo/mod.rs index 3eacc09620..7585c2d5cd 100644 --- a/apollo-router/src/plugins/telemetry/metrics/apollo/mod.rs +++ b/apollo-router/src/plugins/telemetry/metrics/apollo/mod.rs @@ -11,6 +11,7 @@ use opentelemetry_otlp::MetricsExporterBuilder; use opentelemetry_otlp::WithExportConfig; use sys_info::hostname; use tonic::metadata::MetadataMap; +use tonic_0_9 as tonic; use tower::BoxError; use 
url::Url; diff --git a/apollo-router/src/plugins/telemetry/metrics/mod.rs b/apollo-router/src/plugins/telemetry/metrics/mod.rs index d6e6797e2e..f6b521212c 100644 --- a/apollo-router/src/plugins/telemetry/metrics/mod.rs +++ b/apollo-router/src/plugins/telemetry/metrics/mod.rs @@ -1,33 +1,15 @@ -use std::collections::HashMap; - -use ::serde::Deserialize; -use access_json::JSONQuery; -use http::header::HeaderName; -use http::response::Parts; -use http::HeaderMap; use multimap::MultiMap; use opentelemetry::sdk::metrics::reader::AggregationSelector; use opentelemetry::sdk::metrics::Aggregation; use opentelemetry::sdk::metrics::InstrumentKind; use opentelemetry::sdk::Resource; -use regex::Regex; -use schemars::JsonSchema; -use serde::Serialize; use tower::BoxError; -use crate::error::FetchError; -use crate::graphql; -use crate::graphql::Request; -use crate::plugin::serde::deserialize_header_name; -use crate::plugin::serde::deserialize_json_query; -use crate::plugin::serde::deserialize_regex; use crate::plugins::telemetry::apollo_exporter::Sender; -use crate::plugins::telemetry::config::AttributeValue; use crate::plugins::telemetry::config::Conf; use crate::plugins::telemetry::config::MetricsCommon; use crate::plugins::telemetry::resource::ConfigResource; use crate::router_factory::Endpoint; -use crate::Context; use crate::ListenAddr; pub(crate) mod apollo; @@ -36,397 +18,6 @@ pub(crate) mod otlp; pub(crate) mod prometheus; pub(crate) mod span_metrics_exporter; -#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] -#[serde(deny_unknown_fields, default)] -/// Configuration to add custom attributes/labels on metrics -pub(crate) struct MetricsAttributesConf { - /// Configuration to forward header values or body values from router request/response in metric attributes/labels - pub(crate) supergraph: AttributesForwardConf, - /// Configuration to forward header values or body values from subgraph request/response in metric attributes/labels - pub(crate) subgraph: 
SubgraphAttributesConf, -} - -/// Configuration to add custom attributes/labels on metrics to subgraphs -#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] -#[serde(deny_unknown_fields, default)] -pub(crate) struct SubgraphAttributesConf { - /// Attributes for all subgraphs - pub(crate) all: AttributesForwardConf, - /// Attributes per subgraph - pub(crate) subgraphs: HashMap, -} - -/// Configuration to add custom attributes/labels on metrics to subgraphs -#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] -#[serde(deny_unknown_fields, default)] -pub(crate) struct AttributesForwardConf { - /// Configuration to insert custom attributes/labels in metrics - #[serde(rename = "static")] - pub(crate) insert: Vec, - /// Configuration to forward headers or body values from the request to custom attributes/labels in metrics - pub(crate) request: Forward, - /// Configuration to forward headers or body values from the response to custom attributes/labels in metrics - pub(crate) response: Forward, - /// Configuration to forward values from the context to custom attributes/labels in metrics - pub(crate) context: Vec, - /// Configuration to forward values from the error to custom attributes/labels in metrics - pub(crate) errors: ErrorsForward, -} - -#[derive(Clone, JsonSchema, Deserialize, Debug)] -#[serde(rename_all = "snake_case", deny_unknown_fields)] -/// Configuration to insert custom attributes/labels in metrics -pub(crate) struct Insert { - /// The name of the attribute to insert - pub(crate) name: String, - /// The value of the attribute to insert - pub(crate) value: AttributeValue, -} - -/// Configuration to forward from headers/body -#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] -#[serde(deny_unknown_fields, default)] -pub(crate) struct Forward { - /// Forward header values as custom attributes/labels in metrics - pub(crate) header: Vec, - /// Forward body values as custom attributes/labels in metrics - pub(crate) body: Vec, -} - 
-#[derive(Debug, Clone, Deserialize, JsonSchema, Default)] -#[serde(deny_unknown_fields, default)] -pub(crate) struct ErrorsForward { - /// Will include the error message in a "message" attribute - pub(crate) include_messages: Option, - /// Forward extensions values as custom attributes/labels in metrics - pub(crate) extensions: Vec, -} - -schemar_fn!( - forward_header_matching, - String, - "Using a regex on the header name" -); - -#[derive(Clone, JsonSchema, Deserialize, Debug)] -#[serde(rename_all = "snake_case", deny_unknown_fields, untagged)] -/// Configuration to forward header values in metric labels -pub(crate) enum HeaderForward { - /// Match via header name - Named { - /// The name of the header - #[schemars(with = "String")] - #[serde(deserialize_with = "deserialize_header_name")] - named: HeaderName, - /// The optional output name - rename: Option, - /// The optional default value - default: Option, - }, - - /// Match via rgex - Matching { - /// Using a regex on the header name - #[schemars(schema_with = "forward_header_matching")] - #[serde(deserialize_with = "deserialize_regex")] - matching: Regex, - }, -} - -#[derive(Clone, JsonSchema, Deserialize, Debug)] -#[serde(deny_unknown_fields)] -/// Configuration to forward body values in metric attributes/labels -pub(crate) struct BodyForward { - /// The path in the body - #[schemars(with = "String")] - #[serde(deserialize_with = "deserialize_json_query")] - pub(crate) path: JSONQuery, - /// The name of the attribute - pub(crate) name: String, - /// The optional default value - pub(crate) default: Option, -} - -#[derive(Debug, Clone, Deserialize, JsonSchema)] -#[serde(deny_unknown_fields)] -/// Configuration to forward context values in metric attributes/labels -pub(crate) struct ContextForward { - /// The name of the value in the context - pub(crate) named: String, - /// The optional output name - pub(crate) rename: Option, - /// The optional default value - pub(crate) default: Option, -} - -impl 
HeaderForward { - pub(crate) fn get_attributes_from_headers( - &self, - headers: &HeaderMap, - ) -> HashMap { - let mut attributes = HashMap::new(); - match self { - HeaderForward::Named { - named, - rename, - default, - } => { - let value = headers.get(named); - if let Some(value) = value - .and_then(|v| { - v.to_str() - .ok() - .map(|v| AttributeValue::String(v.to_string())) - }) - .or_else(|| default.clone()) - { - attributes.insert(rename.clone().unwrap_or_else(|| named.to_string()), value); - } - } - HeaderForward::Matching { matching } => { - headers - .iter() - .filter(|(name, _)| matching.is_match(name.as_str())) - .for_each(|(name, value)| { - if let Ok(value) = value.to_str() { - attributes.insert( - name.to_string(), - AttributeValue::String(value.to_string()), - ); - } - }); - } - } - - attributes - } -} - -impl Forward { - pub(crate) fn merge(&mut self, to_merge: Self) { - self.body.extend(to_merge.body); - self.header.extend(to_merge.header); - } -} - -impl ErrorsForward { - pub(crate) fn merge(&mut self, to_merge: Self) { - self.extensions.extend(to_merge.extensions); - self.include_messages = to_merge.include_messages.or(self.include_messages); - } - - pub(crate) fn get_attributes_from_error( - &self, - err: &BoxError, - ) -> HashMap { - let mut attributes = HashMap::new(); - if let Some(fetch_error) = err - .source() - .and_then(|e| e.downcast_ref::()) - .or_else(|| err.downcast_ref::()) - { - let gql_error = fetch_error.to_graphql_error(None); - // Include error message - if self.include_messages.unwrap_or_default() { - attributes.insert( - "message".to_string(), - AttributeValue::String(gql_error.message), - ); - } - // Extract data from extensions - for ext_fw in &self.extensions { - let output = ext_fw.path.execute(&gql_error.extensions).unwrap(); - if let Some(val) = output { - if let Ok(val) = AttributeValue::try_from(val) { - attributes.insert(ext_fw.name.clone(), val); - } - } else if let Some(default_val) = &ext_fw.default { - 
attributes.insert(ext_fw.name.clone(), default_val.clone()); - } - } - } else if self.include_messages.unwrap_or_default() { - attributes.insert( - "message".to_string(), - AttributeValue::String(err.to_string()), - ); - } - - attributes - } -} - -impl AttributesForwardConf { - pub(crate) fn get_attributes_from_router_response( - &self, - parts: &Parts, - context: &Context, - first_response: &Option, - ) -> HashMap { - let mut attributes = HashMap::new(); - - // Fill from static - for Insert { name, value } in &self.insert { - attributes.insert(name.clone(), value.clone()); - } - // Fill from context - for ContextForward { - named, - default, - rename, - } in &self.context - { - match context.get::<_, AttributeValue>(named) { - Ok(Some(value)) => { - attributes.insert(rename.as_ref().unwrap_or(named).clone(), value); - } - _ => { - if let Some(default_val) = default { - attributes.insert( - rename.as_ref().unwrap_or(named).clone(), - default_val.clone(), - ); - } - } - }; - } - - // Fill from response - attributes.extend( - self.response - .header - .iter() - .fold(HashMap::new(), |mut acc, current| { - acc.extend(current.get_attributes_from_headers(&parts.headers)); - acc - }), - ); - - if let Some(body) = &first_response { - for body_fw in &self.response.body { - let output = body_fw.path.execute(body).unwrap(); - if let Some(val) = output { - if let Ok(val) = AttributeValue::try_from(val) { - attributes.insert(body_fw.name.clone(), val); - } - } else if let Some(default_val) = &body_fw.default { - attributes.insert(body_fw.name.clone(), default_val.clone()); - } - } - } - - attributes - } - - /// Get attributes from context - pub(crate) fn get_attributes_from_context( - &self, - context: &Context, - ) -> HashMap { - let mut attributes = HashMap::new(); - - for ContextForward { - named, - default, - rename, - } in &self.context - { - match context.get::<_, AttributeValue>(named) { - Ok(Some(value)) => { - 
attributes.insert(rename.as_ref().unwrap_or(named).clone(), value); - } - _ => { - if let Some(default_val) = default { - attributes.insert( - rename.as_ref().unwrap_or(named).clone(), - default_val.clone(), - ); - } - } - }; - } - - attributes - } - - pub(crate) fn get_attributes_from_response( - &self, - headers: &HeaderMap, - body: &T, - ) -> HashMap { - let mut attributes = HashMap::new(); - - // Fill from static - for Insert { name, value } in &self.insert { - attributes.insert(name.clone(), value.clone()); - } - - // Fill from response - attributes.extend( - self.response - .header - .iter() - .fold(HashMap::new(), |mut acc, current| { - acc.extend(current.get_attributes_from_headers(headers)); - acc - }), - ); - for body_fw in &self.response.body { - let output = body_fw.path.execute(body).unwrap(); - if let Some(val) = output { - if let Ok(val) = AttributeValue::try_from(val) { - attributes.insert(body_fw.name.clone(), val); - } - } else if let Some(default_val) = &body_fw.default { - attributes.insert(body_fw.name.clone(), default_val.clone()); - } - } - - attributes - } - - pub(crate) fn get_attributes_from_request( - &self, - headers: &HeaderMap, - body: &Request, - ) -> HashMap { - let mut attributes = HashMap::new(); - - // Fill from static - for Insert { name, value } in &self.insert { - attributes.insert(name.clone(), value.clone()); - } - // Fill from response - attributes.extend( - self.request - .header - .iter() - .fold(HashMap::new(), |mut acc, current| { - acc.extend(current.get_attributes_from_headers(headers)); - acc - }), - ); - for body_fw in &self.request.body { - let output = body_fw.path.execute(body).ok().flatten(); - if let Some(val) = output { - if let Ok(val) = AttributeValue::try_from(val) { - attributes.insert(body_fw.name.clone(), val); - } - } else if let Some(default_val) = &body_fw.default { - attributes.insert(body_fw.name.clone(), default_val.clone()); - } - } - - attributes - } - - pub(crate) fn get_attributes_from_error( - 
&self, - err: &BoxError, - ) -> HashMap { - self.errors.get_attributes_from_error(err) - } -} - pub(crate) struct MetricsBuilder { pub(crate) public_meter_provider_builder: opentelemetry::sdk::metrics::MeterProviderBuilder, pub(crate) apollo_meter_provider_builder: opentelemetry::sdk::metrics::MeterProviderBuilder, diff --git a/apollo-router/src/plugins/telemetry/metrics/prometheus.rs b/apollo-router/src/plugins/telemetry/metrics/prometheus.rs index f14e9d637c..0e8efc4c33 100644 --- a/apollo-router/src/plugins/telemetry/metrics/prometheus.rs +++ b/apollo-router/src/plugins/telemetry/metrics/prometheus.rs @@ -24,7 +24,6 @@ use crate::plugins::telemetry::metrics::MetricsBuilder; use crate::plugins::telemetry::metrics::MetricsConfigurator; use crate::router_factory::Endpoint; use crate::services::router; -use crate::services::router::Body; use crate::ListenAddr; /// Prometheus configuration @@ -199,7 +198,7 @@ impl Service for PrometheusService { response: http::Response::builder() .status(StatusCode::OK) .header(http::header::CONTENT_TYPE, "text/plain; version=0.0.4") - .body::(modified_stats.into()) + .body(router::body::from_bytes(modified_stats)) .map_err(BoxError::from)?, context: req.context, }) diff --git a/apollo-router/src/plugins/telemetry/mod.rs b/apollo-router/src/plugins/telemetry/mod.rs index 54622d43d2..f24fc0f915 100644 --- a/apollo-router/src/plugins/telemetry/mod.rs +++ b/apollo-router/src/plugins/telemetry/mod.rs @@ -8,8 +8,9 @@ use std::time::Instant; use ::tracing::info_span; use ::tracing::Span; -use axum::headers::HeaderName; +use axum_extra::headers::HeaderName; use config_new::cache::CacheInstruments; +use config_new::connector::instruments::ConnectorInstruments; use config_new::instruments::InstrumentsConfig; use config_new::instruments::StaticInstrument; use config_new::Selectors; @@ -73,7 +74,6 @@ use self::config_new::instruments::RouterInstruments; use self::config_new::instruments::SubgraphInstruments; use self::config_new::spans::Spans; 
use self::metrics::apollo::studio::SingleTypeStat; -use self::metrics::AttributesForwardConf; use self::reload::reload_fmt; pub(crate) use self::span_factory::SpanMode; use self::tracing::apollo_telemetry::APOLLO_PRIVATE_DURATION_NS; @@ -99,11 +99,13 @@ use crate::plugins::telemetry::apollo_exporter::proto::reports::StatsContext; use crate::plugins::telemetry::config::AttributeValue; use crate::plugins::telemetry::config::MetricsCommon; use crate::plugins::telemetry::config::TracingCommon; +use crate::plugins::telemetry::config_new::connector::events::ConnectorEvents; use crate::plugins::telemetry::config_new::cost::add_cost_attributes; use crate::plugins::telemetry::config_new::graphql::GraphQLInstruments; use crate::plugins::telemetry::config_new::instruments::SupergraphInstruments; use crate::plugins::telemetry::config_new::trace_id; use crate::plugins::telemetry::config_new::DatadogId; +use crate::plugins::telemetry::consts::CONNECT_SPAN_NAME; use crate::plugins::telemetry::consts::EXECUTION_SPAN_NAME; use crate::plugins::telemetry::consts::OTEL_NAME; use crate::plugins::telemetry::consts::OTEL_STATUS_CODE; @@ -132,11 +134,11 @@ use crate::plugins::telemetry::tracing::TracingConfigurator; use crate::query_planner::OperationKind; use crate::register_private_plugin; use crate::router_factory::Endpoint; +use crate::services::connector_service::CONNECTOR_INFO_CONTEXT_KEY; use crate::services::execution; +use crate::services::http::HttpRequest; use crate::services::router; use crate::services::subgraph; -use crate::services::subgraph::Request; -use crate::services::subgraph::Response; use crate::services::supergraph; use crate::services::ExecutionRequest; use crate::services::SubgraphRequest; @@ -173,8 +175,6 @@ pub(crate) const CLIENT_NAME: &str = "apollo_telemetry::client_name"; const CLIENT_VERSION: &str = "apollo_telemetry::client_version"; const SUBGRAPH_FTV1: &str = "apollo_telemetry::subgraph_ftv1"; pub(crate) const STUDIO_EXCLUDE: &str = 
"apollo_telemetry::studio::exclude"; -pub(crate) const LOGGING_DISPLAY_HEADERS: &str = "apollo_telemetry::logging::display_headers"; -pub(crate) const LOGGING_DISPLAY_BODY: &str = "apollo_telemetry::logging::display_body"; pub(crate) const SUPERGRAPH_SCHEMA_ID_CONTEXT_KEY: &str = "apollo::supergraph_schema_id"; const GLOBAL_TRACER_NAME: &str = "apollo-router"; const DEFAULT_EXPOSE_TRACE_ID_HEADER: &str = "apollo-trace-id"; @@ -203,6 +203,7 @@ pub(crate) struct Telemetry { router_custom_instruments: RwLock>>, supergraph_custom_instruments: RwLock>>, subgraph_custom_instruments: RwLock>>, + connector_custom_instruments: RwLock>>, cache_custom_instruments: RwLock>>, activation: Mutex, } @@ -262,6 +263,7 @@ struct BuiltinInstruments { router_custom_instruments: Arc>, supergraph_custom_instruments: Arc>, subgraph_custom_instruments: Arc>, + connector_custom_instruments: Arc>, cache_custom_instruments: Arc>, } @@ -271,6 +273,7 @@ fn create_builtin_instruments(config: &InstrumentsConfig) -> BuiltinInstruments router_custom_instruments: Arc::new(config.new_builtin_router_instruments()), supergraph_custom_instruments: Arc::new(config.new_builtin_supergraph_instruments()), subgraph_custom_instruments: Arc::new(config.new_builtin_subgraph_instruments()), + connector_custom_instruments: Arc::new(config.new_builtin_connector_instruments()), cache_custom_instruments: Arc::new(config.new_builtin_cache_instruments()), } } @@ -286,7 +289,6 @@ impl PluginPrivate for Telemetry { let mut config = init.config; config.instrumentation.spans.update_defaults(); config.instrumentation.instruments.update_defaults(); - config.exporters.logging.validate()?; if let Err(err) = config.instrumentation.validate() { ::tracing::warn!("Potential configuration error for 'instrumentation': {err}, please check the documentation on https://www.apollographql.com/docs/router/configuration/telemetry/instrumentation/events"); } @@ -305,6 +307,7 @@ impl PluginPrivate for Telemetry { router_custom_instruments, 
supergraph_custom_instruments, subgraph_custom_instruments, + connector_custom_instruments, cache_custom_instruments, } = create_builtin_instruments(&config.instrumentation.instruments); @@ -330,6 +333,7 @@ impl PluginPrivate for Telemetry { router_custom_instruments: RwLock::new(router_custom_instruments), supergraph_custom_instruments: RwLock::new(supergraph_custom_instruments), subgraph_custom_instruments: RwLock::new(subgraph_custom_instruments), + connector_custom_instruments: RwLock::new(connector_custom_instruments), cache_custom_instruments: RwLock::new(cache_custom_instruments), config: Arc::new(config), }) @@ -576,14 +580,20 @@ impl PluginPrivate for Telemetry { let static_supergraph_instruments = self.supergraph_custom_instruments.read().clone(); let static_graphql_instruments = self.graphql_custom_instruments.read().clone(); ServiceBuilder::new() - .instrument(move |supergraph_req: &SupergraphRequest| span_mode.create_supergraph( - &config_instrument.apollo, - supergraph_req, - field_level_instrumentation_ratio, - )) + .instrument(move |supergraph_req: &SupergraphRequest| { + span_mode.create_supergraph( + &config_instrument.apollo, + supergraph_req, + field_level_instrumentation_ratio, + ) + }) .map_response(move |mut resp: SupergraphResponse| { let config = config_map_res_first.clone(); - if let Some(usage_reporting) = resp.context.extensions().with_lock(|lock| lock.get::>().cloned()) { + if let Some(usage_reporting) = resp + .context + .extensions() + .with_lock(|lock| lock.get::>().cloned()) + { // Record the operation signature on the router span Span::current().record( APOLLO_PRIVATE_OPERATION_SIGNATURE.as_str(), @@ -591,52 +601,49 @@ impl PluginPrivate for Telemetry { ); } // To expose trace_id or not - let expose_trace_id_header = config.exporters.tracing.response_trace_id.enabled.then(|| { - config.exporters.tracing.response_trace_id - .header_name - .clone() - .unwrap_or_else(|| DEFAULT_EXPOSE_TRACE_ID_HEADER_NAME.clone()) - }); + let 
expose_trace_id_header = + config.exporters.tracing.response_trace_id.enabled.then(|| { + config + .exporters + .tracing + .response_trace_id + .header_name + .clone() + .unwrap_or_else(|| DEFAULT_EXPOSE_TRACE_ID_HEADER_NAME.clone()) + }); // Append the trace ID with the right format, based on the config let format_id = |trace_id: TraceId| { let id = match config.exporters.tracing.response_trace_id.format { - TraceIdFormat::Hexadecimal | TraceIdFormat::OpenTelemetry => format!("{:032x}", trace_id), - TraceIdFormat::Decimal => format!("{}", u128::from_be_bytes(trace_id.to_bytes())), + TraceIdFormat::Hexadecimal | TraceIdFormat::OpenTelemetry => { + format!("{:032x}", trace_id) + } + TraceIdFormat::Decimal => { + format!("{}", u128::from_be_bytes(trace_id.to_bytes())) + } TraceIdFormat::Datadog => trace_id.to_datadog(), TraceIdFormat::Uuid => Uuid::from_bytes(trace_id.to_bytes()).to_string(), }; HeaderValue::from_str(&id).ok() }; - if let (Some(header_name), Some(trace_id)) = ( - expose_trace_id_header, - trace_id().and_then(format_id), - ) { + if let (Some(header_name), Some(trace_id)) = + (expose_trace_id_header, trace_id().and_then(format_id)) + { resp.response.headers_mut().append(header_name, trace_id); } - if resp.context.contains_key(LOGGING_DISPLAY_HEADERS) { - let sorted_headers = resp - .response - .headers() - .iter() - .map(|(k, v)| (k.as_str(), v)) - .collect::>(); - ::tracing::info!(http.response.headers = ?sorted_headers, "Supergraph response headers"); - } - let display_body = resp.context.contains_key(LOGGING_DISPLAY_BODY); - resp.map_stream(move |gql_response| { - if display_body { - ::tracing::info!(http.response.body = ?gql_response, "Supergraph GraphQL response"); - } - gql_response - }) + resp }) .map_future_with_request_data( move |req: &SupergraphRequest| { - let custom_attributes = config.instrumentation.spans.supergraph.attributes.on_request(req); - Self::populate_context(config.clone(), field_level_instrumentation_ratio, req); + let 
custom_attributes = config + .instrumentation + .spans + .supergraph + .attributes + .on_request(req); + Self::populate_context(field_level_instrumentation_ratio, req); let custom_instruments = config .instrumentation .instruments @@ -644,15 +651,35 @@ impl PluginPrivate for Telemetry { custom_instruments.on_request(req); let custom_graphql_instruments: GraphQLInstruments = config .instrumentation - .instruments.new_graphql_instruments(static_graphql_instruments.clone()); + .instruments + .new_graphql_instruments(static_graphql_instruments.clone()); custom_graphql_instruments.on_request(req); let supergraph_events = config.instrumentation.events.new_supergraph_events(); supergraph_events.on_request(req); - (req.context.clone(), custom_instruments, custom_attributes, supergraph_events, custom_graphql_instruments) + ( + req.context.clone(), + custom_instruments, + custom_attributes, + supergraph_events, + custom_graphql_instruments, + ) }, - move |(ctx, custom_instruments, mut custom_attributes, supergraph_events, custom_graphql_instruments): (Context, SupergraphInstruments, Vec, SupergraphEvents, GraphQLInstruments), fut| { + move |( + ctx, + custom_instruments, + mut custom_attributes, + supergraph_events, + custom_graphql_instruments, + ): ( + Context, + SupergraphInstruments, + Vec, + SupergraphEvents, + GraphQLInstruments, + ), + fut| { let config = config_map_res.clone(); let sender = metrics_sender.clone(); let start = Instant::now(); @@ -665,30 +692,48 @@ impl PluginPrivate for Telemetry { span.set_span_dyn_attributes(custom_attributes); match &result { Ok(resp) => { - span.set_span_dyn_attributes(config.instrumentation.spans.supergraph.attributes.on_response(resp)); + span.set_span_dyn_attributes( + config + .instrumentation + .spans + .supergraph + .attributes + .on_response(resp), + ); custom_instruments.on_response(resp); supergraph_events.on_response(resp); custom_graphql_instruments.on_response(resp); - }, + } Err(err) => { - 
span.set_span_dyn_attributes(config.instrumentation.spans.supergraph.attributes.on_error(err, &ctx)); + span.set_span_dyn_attributes( + config + .instrumentation + .spans + .supergraph + .attributes + .on_error(err, &ctx), + ); custom_instruments.on_error(err, &ctx); supergraph_events.on_error(err, &ctx); custom_graphql_instruments.on_error(err, &ctx); - }, + } } result = Self::update_otel_metrics( config.clone(), ctx.clone(), result, - start.elapsed(), custom_instruments, supergraph_events, custom_graphql_instruments, ) .await; Self::update_metrics_on_response_events( - &ctx, config, field_level_instrumentation_ratio, sender, start, result, + &ctx, + config, + field_level_instrumentation_ratio, + sender, + start, + result, ) } }, @@ -725,9 +770,6 @@ impl PluginPrivate for Telemetry { let config = self.config.clone(); let span_mode = self.config.instrumentation.spans.mode; let conf = self.config.clone(); - let subgraph_attribute = KeyValue::new("subgraph", name.to_string()); - let subgraph_metrics_conf_req = self.create_subgraph_metrics_conf(name); - let subgraph_metrics_conf_resp = subgraph_metrics_conf_req.clone(); let subgraph_name = ByteString::from(name); let name = name.to_owned(); let static_subgraph_instruments = self.subgraph_custom_instruments.read().clone(); @@ -738,11 +780,6 @@ impl PluginPrivate for Telemetry { .map_response(move |resp| store_ftv1(&subgraph_name, resp)) .map_future_with_request_data( move |sub_request: &SubgraphRequest| { - Self::store_subgraph_request_attributes( - subgraph_metrics_conf_req.as_ref(), - sub_request, - ); - let custom_attributes = config .instrumentation .spans @@ -785,11 +822,7 @@ impl PluginPrivate for Telemetry { CacheInstruments, ), f: BoxFuture<'static, Result>| { - let subgraph_attribute = subgraph_attribute.clone(); - let subgraph_metrics_conf = subgraph_metrics_conf_resp.clone(); let conf = conf.clone(); - // Using Instant because it is guaranteed to be monotonically increasing. 
- let now = Instant::now(); async move { let span = Span::current(); span.set_span_dyn_attributes(custom_attributes); @@ -829,13 +862,6 @@ impl PluginPrivate for Telemetry { } } - Self::store_subgraph_response_attributes( - &context, - subgraph_attribute, - subgraph_metrics_conf.as_ref(), - now, - &result, - ); result } }, @@ -844,6 +870,100 @@ impl PluginPrivate for Telemetry { .boxed() } + fn http_client_service( + &self, + _subgraph_name: &str, + service: crate::services::http::BoxService, + ) -> crate::services::http::BoxService { + let req_fn_config = self.config.clone(); + let res_fn_config = self.config.clone(); + let static_connector_instruments = self.connector_custom_instruments.read().clone(); + ServiceBuilder::new() + .map_future_with_request_data( + move |http_request: &HttpRequest| { + if http_request + .context + .contains_key(CONNECTOR_INFO_CONTEXT_KEY) + { + let custom_instruments = req_fn_config + .instrumentation + .instruments + .new_connector_instruments(static_connector_instruments.clone()); + custom_instruments.on_request(http_request); + let custom_events = + req_fn_config.instrumentation.events.new_connector_events(); + custom_events.on_request(http_request); + + let custom_span_attributes = req_fn_config + .instrumentation + .spans + .connector + .attributes + .on_request(http_request); + + ( + http_request.context.clone(), + Some((custom_instruments, custom_events, custom_span_attributes)), + ) + } else { + (http_request.context.clone(), None) + } + }, + move |(context, custom_telemetry): ( + Context, + Option<(ConnectorInstruments, ConnectorEvents, Vec)>, + ), + f: BoxFuture< + 'static, + Result, + >| { + let conf = res_fn_config.clone(); + async move { + match custom_telemetry { + Some((custom_instruments, custom_events, custom_span_attributes)) => { + let span = Span::current(); + span.set_span_dyn_attributes_for_span_name( + CONNECT_SPAN_NAME, + custom_span_attributes, + ); + let result = f.await; + match &result { + Ok(http_response) 
=> { + span.set_span_dyn_attributes_for_span_name( + CONNECT_SPAN_NAME, + conf.instrumentation + .spans + .connector + .attributes + .on_response(http_response), + ); + custom_instruments.on_response(http_response); + custom_events.on_response(http_response); + } + Err(err) => { + span.set_span_dyn_attributes_for_span_name( + CONNECT_SPAN_NAME, + conf.instrumentation + .spans + .connector + .attributes + .on_error(err, &context), + ); + custom_instruments.on_error(err, &context); + custom_events.on_error(err, &context); + } + } + result + } + _ => f.await, + } + } + }, + ) + .service(service) + .boxed() + } + fn web_endpoints(&self) -> MultiMap { self.custom_endpoints.clone() } @@ -889,6 +1009,7 @@ impl PluginPrivate for Telemetry { router_custom_instruments, supergraph_custom_instruments, subgraph_custom_instruments, + connector_custom_instruments, cache_custom_instruments, } = create_builtin_instruments(&self.config.instrumentation.instruments); @@ -896,6 +1017,7 @@ impl PluginPrivate for Telemetry { *self.router_custom_instruments.write() = router_custom_instruments; *self.supergraph_custom_instruments.write() = supergraph_custom_instruments; *self.subgraph_custom_instruments.write() = subgraph_custom_instruments; + *self.connector_custom_instruments.write() = connector_custom_instruments; *self.cache_custom_instruments.write() = cache_custom_instruments; reload_fmt(create_fmt_layer(&self.config)); @@ -1019,29 +1141,12 @@ impl Telemetry { config: Arc, context: Context, result: Result, - request_duration: Duration, custom_instruments: SupergraphInstruments, custom_events: SupergraphEvents, custom_graphql_instruments: GraphQLInstruments, ) -> Result { - let mut metric_attrs = context - .extensions() - .with_lock(|lock| lock.get::().cloned()) - .map(|attrs| { - attrs - .0 - .into_iter() - .map(|(attr_name, attr_value)| KeyValue::new(attr_name, attr_value)) - .collect::>() - }) - .unwrap_or_default(); let res = match result { Ok(response) => { - 
metric_attrs.push(KeyValue::new( - "status", - response.response.status().as_u16().to_string(), - )); - let ctx = context.clone(); // Wait for the first response of the stream let (parts, stream) = response.response.into_parts(); @@ -1068,19 +1173,6 @@ impl Telemetry { }); let (first_response, rest) = stream.into_future().await; - let attributes = config - .exporters - .metrics - .common - .attributes - .supergraph - .get_attributes_from_router_response(&parts, &context, &first_response); - - metric_attrs.extend(attributes.into_iter().map(|(k, v)| KeyValue::new(k, v))); - - if !parts.status.is_success() { - metric_attrs.push(KeyValue::new("error", parts.status.to_string())); - } let response = http::Response::from_parts( parts, once(ready(first_response.unwrap_or_default())) @@ -1090,55 +1182,14 @@ impl Telemetry { Ok(SupergraphResponse { context, response }) } - Err(err) => { - metric_attrs.push(KeyValue::new("status", "500")); - Err(err) - } + Err(err) => Err(err), }; - // http_requests_total - the total number of HTTP requests received - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - metric_attrs - ); - - f64_histogram!( - "apollo_router_http_request_duration_seconds", - "Duration of HTTP requests.", - request_duration.as_secs_f64(), - metric_attrs - ); res } - fn populate_context( - config: Arc, - field_level_instrumentation_ratio: f64, - req: &SupergraphRequest, - ) { + fn populate_context(field_level_instrumentation_ratio: f64, req: &SupergraphRequest) { let context = &req.context; - let http_request = &req.supergraph_request; - let headers = http_request.headers(); - - let (should_log_headers, should_log_body) = config.exporters.logging.should_log(req); - if should_log_headers { - let sorted_headers = req - .supergraph_request - .headers() - .iter() - .map(|(k, v)| (k.as_str(), v)) - .collect::>(); - ::tracing::info!(http.request.headers = ?sorted_headers, "Supergraph request headers"); - - let _ = 
req.context.insert(LOGGING_DISPLAY_HEADERS, true); - } - if should_log_body { - ::tracing::info!(http.request.body = ?req.supergraph_request.body(), "Supergraph request body"); - - let _ = req.context.insert(LOGGING_DISPLAY_BODY, true); - } // List of custom attributes for metrics let mut attributes: HashMap = HashMap::new(); @@ -1149,16 +1200,6 @@ impl Telemetry { ); } - let router_attributes_conf = &config.exporters.metrics.common.attributes.supergraph; - attributes.extend( - router_attributes_conf - .get_attributes_from_request(headers, req.supergraph_request.body()), - ); - attributes.extend(router_attributes_conf.get_attributes_from_context(context)); - - let _ = context - .extensions() - .with_lock(|mut lock| lock.insert(MetricsAttributes(attributes))); if rand::thread_rng().gen_bool(field_level_instrumentation_ratio) { context .extensions() @@ -1166,143 +1207,6 @@ impl Telemetry { } } - fn create_subgraph_metrics_conf(&self, name: &str) -> Arc { - let subgraph_cfg = &self.config.exporters.metrics.common.attributes.subgraph; - macro_rules! extend_config { - ($forward_kind: ident) => {{ - let mut cfg = subgraph_cfg.all.$forward_kind.clone(); - cfg.extend( - subgraph_cfg - .subgraphs - .get(&name.to_owned()) - .map(|s| s.$forward_kind.clone()) - .unwrap_or_default(), - ); - - cfg - }}; - } - macro_rules! 
merge_config { - ($forward_kind: ident) => {{ - let mut cfg = subgraph_cfg.all.$forward_kind.clone(); - cfg.merge( - subgraph_cfg - .subgraphs - .get(&name.to_owned()) - .map(|s| s.$forward_kind.clone()) - .unwrap_or_default(), - ); - - cfg - }}; - } - - Arc::new(AttributesForwardConf { - insert: extend_config!(insert), - request: merge_config!(request), - response: merge_config!(response), - errors: merge_config!(errors), - context: extend_config!(context), - }) - } - - fn store_subgraph_request_attributes( - attribute_forward_config: &AttributesForwardConf, - sub_request: &Request, - ) { - let mut attributes = HashMap::new(); - attributes.extend(attribute_forward_config.get_attributes_from_request( - sub_request.subgraph_request.headers(), - sub_request.subgraph_request.body(), - )); - attributes - .extend(attribute_forward_config.get_attributes_from_context(&sub_request.context)); - sub_request - .context - .extensions() - .with_lock(|mut lock| lock.insert(SubgraphMetricsAttributes(attributes))); - //.unwrap(); - } - - #[allow(clippy::too_many_arguments)] - fn store_subgraph_response_attributes( - context: &Context, - subgraph_attribute: KeyValue, - attribute_forward_config: &AttributesForwardConf, - now: Instant, - result: &Result, - ) { - let mut metric_attrs = context - .extensions() - .with_lock(|lock| lock.get::().cloned()) - .map(|attrs| { - attrs - .0 - .into_iter() - .map(|(attr_name, attr_value)| KeyValue::new(attr_name, attr_value)) - .collect::>() - }) - .unwrap_or_default(); - metric_attrs.push(subgraph_attribute); - // Fill attributes from context - metric_attrs.extend( - attribute_forward_config - .get_attributes_from_context(context) - .into_iter() - .map(|(k, v)| KeyValue::new(k, v)), - ); - - match &result { - Ok(response) => { - metric_attrs.push(KeyValue::new( - "status", - response.response.status().as_u16().to_string(), - )); - - // Fill attributes from response - metric_attrs.extend( - attribute_forward_config - 
.get_attributes_from_response( - response.response.headers(), - response.response.body(), - ) - .into_iter() - .map(|(k, v)| KeyValue::new(k, v)), - ); - - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - metric_attrs - ); - } - Err(err) => { - metric_attrs.push(KeyValue::new("status", "500")); - // Fill attributes from error - metric_attrs.extend( - attribute_forward_config - .get_attributes_from_error(err) - .into_iter() - .map(|(k, v)| KeyValue::new(k, v)), - ); - - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - metric_attrs - ); - } - } - f64_histogram!( - "apollo_router_http_request_duration_seconds", - "Duration of HTTP requests.", - now.elapsed().as_secs_f64(), - metric_attrs - ); - } - #[allow(clippy::too_many_arguments)] fn update_metrics_on_response_events( ctx: &Context, @@ -1331,27 +1235,6 @@ impl Telemetry { Default::default(), ); } - let mut metric_attrs = Vec::new(); - // Fill attributes from error - - metric_attrs.extend( - config - .exporters - .metrics - .common - .attributes - .supergraph - .get_attributes_from_error(&e) - .into_iter() - .map(|(k, v)| KeyValue::new(k, v)), - ); - - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - metric_attrs - ); Err(e) } @@ -2101,12 +1984,6 @@ pub(crate) fn add_query_attributes(context: &Context, custom_attributes: &mut Ve }); } -#[derive(Clone)] -struct MetricsAttributes(HashMap); - -#[derive(Clone)] -struct SubgraphMetricsAttributes(HashMap); - struct EnableSubgraphFtv1; // // Please ensure that any tests added to the tests module use the tokio multi-threaded test executor. 
@@ -2122,7 +1999,7 @@ mod tests { use std::sync::Mutex; use std::time::Duration; - use axum::headers::HeaderName; + use axum_extra::headers::HeaderName; use dashmap::DashMap; use http::header::CONTENT_TYPE; use http::HeaderMap; @@ -2176,7 +2053,7 @@ mod tests { use crate::plugins::telemetry::config::TraceIdFormat; use crate::plugins::telemetry::handle_error_internal; use crate::plugins::telemetry::EnableSubgraphFtv1; - use crate::services::router::body::get_body_bytes; + use crate::services::router; use crate::services::RouterRequest; use crate::services::RouterResponse; use crate::services::SubgraphRequest; @@ -2224,11 +2101,11 @@ mod tests { .into_router(); let http_req_prom = http::Request::get("http://localhost:9090/metrics") - .body(Default::default()) + .body(axum::body::Body::empty()) .unwrap(); let mut resp = web_endpoint.oneshot(http_req_prom).await.unwrap(); assert_eq!(resp.status(), StatusCode::OK); - let body = get_body_bytes(resp.body_mut()).await.unwrap(); + let body = router::body::into_bytes(resp.body_mut()).await.unwrap(); String::from_utf8_lossy(&body) .split('\n') .filter(|l| l.contains("bucket") && !l.contains("apollo_router_span_count")) @@ -2290,13 +2167,12 @@ mod tests { make_supergraph_request(plugin.as_ref()).await; assert_counter!( - "apollo_router_http_requests_total", + "http.request", 1, "another_test" = "my_default_value", "my_value" = 2, "myname" = "label_value", "renamed_value" = "my_value_set", - "status" = "200", "x-custom" = "coming_from_header" ); } @@ -2317,7 +2193,10 @@ mod tests { Ok(SupergraphResponse::fake_builder() .context(req.context) .status_code(StatusCode::BAD_REQUEST) - .data(json!({"errors": [{"message": "nope"}]})) + .errors(vec![crate::graphql::Error::builder() + .message("nope") + .extension_code("NOPE") + .build()]) .build() .unwrap()) }, @@ -2337,13 +2216,12 @@ mod tests { .unwrap(); assert_counter!( - "apollo_router_http_requests_total", + "http.request", 1, "another_test" = "my_default_value", - "error" = "400 
Bad Request", + "error" = "nope", "myname" = "label_value", - "renamed_value" = "my_value_set", - "status" = "400" + "renamed_value" = "my_value_set" ); } .with_metrics() @@ -2927,6 +2805,7 @@ mod tests { .build() .unwrap(), ) + .subgraph_name("my_subgraph_name") .build(); let _subgraph_response = subgraph_service .ready() @@ -2936,15 +2815,15 @@ mod tests { .await .unwrap(); - assert_counter!( - "apollo_router_http_requests_total", + assert_histogram_count!( + "http.client.request.duration", 1, "error" = "custom_error_for_propagation", "my_key" = "my_custom_attribute_from_context", "query_from_request" = "query { test }", - "status" = "200", + "status" = 200, "subgraph" = "my_subgraph_name", - "unknown_data" = "default_value" + "subgraph_error_extended_code" = "FETCH_ERROR" ); } .with_metrics() @@ -2987,6 +2866,7 @@ mod tests { .build() .unwrap(), ) + .subgraph_name("my_subgraph_name_error") .build(); let _subgraph_response = subgraph_service .ready() @@ -2996,62 +2876,13 @@ mod tests { .await .expect_err("should be an error"); - assert_counter!( - "apollo_router_http_requests_total", + assert_histogram_count!( + "http.client.request.duration", 1, - "message" = "cannot contact the subgraph", - "status" = "500", + "message" = + "HTTP fetch failed from 'my_subgraph_name_error': cannot contact the subgraph", "subgraph" = "my_subgraph_name_error", - "subgraph_error_extended_code" = "SUBREQUEST_HTTP_ERROR" - ); - } - .with_metrics() - .await; - } - - #[tokio::test] - async fn test_subgraph_metrics_bad_request() { - async { - let plugin = - create_plugin_with_config(include_str!("testdata/custom_attributes.router.yaml")) - .await; - - let mut mock_bad_request_service = MockSupergraphService::new(); - mock_bad_request_service.expect_call().times(1).returning( - move |req: SupergraphRequest| { - Ok(SupergraphResponse::fake_builder() - .context(req.context) - .status_code(StatusCode::BAD_REQUEST) - .data(json!({"errors": [{"message": "nope"}]})) - .build() - .unwrap()) - 
}, - ); - - let mut bad_request_supergraph_service = - plugin.supergraph_service(BoxService::new(mock_bad_request_service)); - - let router_req = SupergraphRequest::fake_builder().header("test", "my_value_set"); - - let _router_response = bad_request_supergraph_service - .ready() - .await - .unwrap() - .call(router_req.build().unwrap()) - .await - .unwrap() - .next_response() - .await - .unwrap(); - - assert_counter!( - "apollo_router_http_requests_total", - 1, - "another_test" = "my_default_value", - "error" = "400 Bad Request", - "myname" = "label_value", - "renamed_value" = "my_value_set", - "status" = "400" + "query_from_request" = "query { test }" ); } .with_metrics() @@ -3076,16 +2907,17 @@ mod tests { .into_router(); let http_req_prom = http::Request::get("http://localhost:9090/WRONG/URL/metrics") - .body(Default::default()) + .body(crate::services::router::body::empty()) .unwrap(); - let resp = web_endpoint - .ready() - .await - .unwrap() - .call(http_req_prom) - .await - .unwrap(); + let resp = >>::ready( + &mut web_endpoint, + ) + .await + .unwrap() + .call(http_req_prom) + .await + .unwrap(); assert_eq!(resp.status(), StatusCode::NOT_FOUND); } .with_metrics() @@ -3097,6 +2929,8 @@ mod tests { async { let plugin = create_plugin_with_config(include_str!("testdata/prometheus.router.yaml")).await; + u64_histogram!("apollo.test.histo", "it's a test", 1u64); + make_supergraph_request(plugin.as_ref()).await; let prometheus_metrics = get_prometheus_metrics(plugin.as_ref()).await; assert_snapshot!(prometheus_metrics); @@ -3112,6 +2946,8 @@ mod tests { "testdata/prometheus_custom_buckets.router.yaml" )) .await; + u64_histogram!("apollo.test.histo", "it's a test", 1u64); + make_supergraph_request(plugin.as_ref()).await; let prometheus_metrics = get_prometheus_metrics(plugin.as_ref()).await; @@ -3129,6 +2965,7 @@ mod tests { )) .await; make_supergraph_request(plugin.as_ref()).await; + u64_histogram!("apollo.test.histo", "it's a test", 1u64); let prometheus_metrics = 
get_prometheus_metrics(plugin.as_ref()).await; assert_snapshot!(prometheus_metrics); diff --git a/apollo-router/src/plugins/telemetry/otlp.rs b/apollo-router/src/plugins/telemetry/otlp.rs index 50985c28e9..680d6f3bae 100644 --- a/apollo-router/src/plugins/telemetry/otlp.rs +++ b/apollo-router/src/plugins/telemetry/otlp.rs @@ -6,6 +6,7 @@ use std::sync::LazyLock; use http::uri::Parts; use http::uri::PathAndQuery; use http::Uri; +use http_0_2 as http; use opentelemetry::sdk::metrics::reader::TemporalitySelector; use opentelemetry::sdk::metrics::InstrumentKind; use opentelemetry_otlp::HttpExporterBuilder; @@ -19,6 +20,7 @@ use tonic::metadata::MetadataMap; use tonic::transport::Certificate; use tonic::transport::ClientTlsConfig; use tonic::transport::Identity; +use tonic_0_9 as tonic; use tower::BoxError; use url::Url; @@ -65,7 +67,7 @@ pub(crate) struct Config { pub(crate) temporality: Temporality, } -#[derive(Copy, Clone)] +#[derive(Copy, Clone, Debug)] pub(crate) enum TelemetryDataKind { Traces, Metrics, @@ -78,7 +80,7 @@ impl Config { ) -> Result { match self.protocol { Protocol::Grpc => { - let endpoint = self.endpoint.to_uri(&DEFAULT_GRPC_ENDPOINT); + let endpoint = self.endpoint.to_uri_0_2(&DEFAULT_GRPC_ENDPOINT); let grpc = self.grpc.clone(); let exporter = opentelemetry_otlp::new_exporter() .tonic() @@ -97,7 +99,7 @@ impl Config { let endpoint = add_missing_path( kind, self.endpoint - .to_uri(&DEFAULT_HTTP_ENDPOINT) + .to_uri_0_2(&DEFAULT_HTTP_ENDPOINT) .map(|e| e.into_parts()), )?; let http = self.http.clone(); @@ -179,7 +181,7 @@ pub(crate) struct GrpcExporter { pub(crate) key: Option, /// gRPC metadata - #[serde(with = "http_serde::header_map")] + #[serde(with = "http_serde_1_1::header_map")] #[schemars(schema_with = "header_map", default)] pub(crate) metadata: http::HeaderMap, } diff --git a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__json_logging_with_custom_events_with_instrumented.snap 
b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__json_logging_with_custom_events_with_instrumented.snap index 0169cc4c8d..529500dc37 100644 --- a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__json_logging_with_custom_events_with_instrumented.snap +++ b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__json_logging_with_custom_events_with_instrumented.snap @@ -4,9 +4,8 @@ expression: buff.to_string() --- {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.response.body":"{\"foo\": \"bar\"}","http.response.body.size":125,"message":"my message","kind":"my_custom_event","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"ERROR","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","message":"Hello from test","http.method":"GET","target":"apollo_router::plugins::telemetry::fmt_layer::tests"} -{"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.request.body":"Body(Empty)","http.request.headers":"{\"content-length\": \"0\", \"custom-header\": \"val1\", \"x-log-request\": \"log\"}","http.request.method":"GET","http.request.uri":"http://example.com/","http.request.version":"HTTP/1.1","message":"","kind":"router.request","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.request.body.size":0,"message":"my event message","kind":"my.request_event","target":"apollo_router::plugins::telemetry::config_new::events"} 
-{"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.response.body":"Body(Full(b\"{\\\"data\\\":{\\\"data\\\":\\\"res\\\"}}\"))","http.response.headers":"{\"content-length\": \"25\", \"custom-header\": \"val1\", \"x-log-request\": \"log\"}","http.response.status":"200 OK","http.response.version":"HTTP/1.1","message":"","kind":"router.response","target":"apollo_router::plugins::telemetry::config_new::events"} +{"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.response.headers":"{\"content-length\": \"25\", \"custom-header\": \"val1\", \"x-log-request\": \"log\"}","http.response.status":"200 OK","http.response.version":"HTTP/1.1","message":"","kind":"router.response","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.response.body.size":25,"message":"my response event message","kind":"my.response_event","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.request.body":"{\"query\":\"query { foo }\"}","http.request.headers":"{\"content-type\": \"application/json\", \"x-log-request\": \"log\"}","http.request.method":"POST","http.request.uri":"http://default/","http.request.version":"HTTP/1.1","message":"","kind":"supergraph.request","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","message":"my event message","kind":"my.request.event","target":"apollo_router::plugins::telemetry::config_new::events"} @@ -15,3 +14,7 @@ expression: buff.to_string() 
{"timestamp":"[timestamp]","level":"ERROR","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","my.custom.attribute":["{\"id\":1234,\"name\":\"first_name\"}","{\"id\":567,\"name\":\"second_name\"}"],"response_status":200,"subgraph.name":"subgraph","message":"my response event message","kind":"my.subgraph.response.event","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","message":"my event message","kind":"my.subgraph.request.event","target":"apollo_router::plugins::telemetry::config_new::events"} {"timestamp":"[timestamp]","level":"ERROR","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","my.custom.attribute":"[[{\"id\":1234,\"name\":\"first_name\"},{\"id\":567,\"name\":\"second_name\"}],{\"foo\":\"bar\"}]","response_status":200,"subgraph.name":"subgraph_bis","message":"my response event message","kind":"my.subgraph.response.event","target":"apollo_router::plugins::telemetry::config_new::events"} +{"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.request.headers":"{\"x-log-request\": \"log\"}","http.request.method":"GET","http.request.uri":"/","http.request.version":"HTTP/1.1","message":"","kind":"connector.request","target":"apollo_router::plugins::telemetry::config_new::events"} +{"timestamp":"[timestamp]","level":"INFO","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","connector_source":"source","http_method":"GET","subgraph.name":"connector_subgraph","url_template":"/test","message":"my request event message","kind":"my.connector.request.event","target":"apollo_router::plugins::telemetry::config_new::events"} +{"timestamp":"[timestamp]","level":"WARN","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","http.response.headers":"{\"x-log-response\": 
\"log\"}","http.response.status":"200 OK","http.response.version":"HTTP/1.1","message":"","kind":"connector.response","target":"apollo_router::plugins::telemetry::config_new::events"} +{"timestamp":"[timestamp]","level":"ERROR","trace_id":"00000000000000000000000000000000","span_id":"0000000000000000","connector_source":"source","http_method":"GET","response_status":200,"subgraph.name":"connector_subgraph","url_template":"/test","message":"my response event message","kind":"my.connector.response.event","target":"apollo_router::plugins::telemetry::config_new::events"} diff --git a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__text_logging_with_custom_events_with_instrumented.snap b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__text_logging_with_custom_events_with_instrumented.snap index d329c82801..99e1b32eca 100644 --- a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__text_logging_with_custom_events_with_instrumented.snap +++ b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__fmt_layer__tests__text_logging_with_custom_events_with_instrumented.snap @@ -4,9 +4,8 @@ expression: buff.to_string() --- [timestamp] INFO http.response.body={"foo": "bar"} http.response.body.size=125 my message kind=my_custom_event [timestamp] ERROR Hello from test http.method="GET" -[timestamp] INFO http.request.body=Body(Empty) http.request.headers={"content-length": "0", "custom-header": "val1", "x-log-request": "log"} http.request.method=GET http.request.uri=http://example.com/ http.request.version=HTTP/1.1 kind=router.request [timestamp] INFO http.request.body.size=0 my event message kind=my.request_event -[timestamp] INFO http.response.body=Body(Full(b"{\"data\":{\"data\":\"res\"}}")) http.response.headers={"content-length": "25", "custom-header": "val1", "x-log-request": "log"} 
http.response.status=200 OK http.response.version=HTTP/1.1 kind=router.response +[timestamp] INFO http.response.body={"data": {"data": "res"}} http.response.headers={"content-length": "25", "custom-header": "val1", "x-log-request": "log"} http.response.status=200 OK http.response.version=HTTP/1.1 kind=router.response [timestamp] INFO http.response.body.size=25 my response event message kind=my.response_event [timestamp] INFO http.request.body={"query":"query { foo }"} http.request.headers={"content-type": "application/json", "x-log-request": "log"} http.request.method=POST http.request.uri=http://default/ http.request.version=HTTP/1.1 kind=supergraph.request [timestamp] INFO my event message kind=my.request.event @@ -15,3 +14,7 @@ expression: buff.to_string() [timestamp] ERROR my.custom.attribute=["{"id":1234,"name":"first_name"}","{"id":567,"name":"second_name"}"] response_status=200 subgraph.name=subgraph my response event message kind=my.subgraph.response.event [timestamp] INFO my event message kind=my.subgraph.request.event [timestamp] ERROR my.custom.attribute=[[{"id":1234,"name":"first_name"},{"id":567,"name":"second_name"}],{"foo":"bar"}] response_status=200 subgraph.name=subgraph_bis my response event message kind=my.subgraph.response.event +[timestamp] INFO http.request.headers={"x-log-request": "log"} http.request.method=GET http.request.uri=/ http.request.version=HTTP/1.1 kind=connector.request +[timestamp] INFO connector_source=source http_method=GET subgraph.name=connector_subgraph url_template=/test my request event message kind=my.connector.request.event +[timestamp] WARN http.response.headers={"x-log-response": "log"} http.response.status=200 OK http.response.version=HTTP/1.1 kind=connector.response +[timestamp] ERROR connector_source=source http_method=GET response_status=200 subgraph.name=connector_subgraph url_template=/test my response event message kind=my.connector.response.event diff --git 
a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics.snap b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics.snap index eae5de460a..f32b94a355 100644 --- a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics.snap +++ b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics.snap @@ -2,16 +2,16 @@ source: apollo-router/src/plugins/telemetry/mod.rs expression: prometheus_metrics --- -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="+Inf"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.001"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.005"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.015"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.05"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.1"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.2"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.3"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.4"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="0.5"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="1"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="10"} 1 
-apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="5"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="+Inf"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.001"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.005"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.015"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.05"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.1"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.2"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.3"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.4"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="0.5"} 0 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="1"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="10"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="5"} 1 diff --git a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets.snap b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets.snap index 3f346c2ad6..cbb3f06d58 100644 --- a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets.snap +++ b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets.snap @@ -2,7 +2,7 @@ source: apollo-router/src/plugins/telemetry/mod.rs expression: prometheus_metrics --- -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="+Inf"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="10"} 1 
-apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="20"} 1 -apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="5"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="+Inf"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="10"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="20"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="5"} 1 diff --git a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets_for_specific_metrics.snap b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets_for_specific_metrics.snap index 49dfbd2b76..3d349553e8 100644 --- a/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets_for_specific_metrics.snap +++ b/apollo-router/src/plugins/telemetry/snapshots/apollo_router__plugins__telemetry__tests__it_test_prometheus_metrics_custom_buckets_for_specific_metrics.snap @@ -2,9 +2,9 @@ source: apollo-router/src/plugins/telemetry/mod.rs expression: prometheus_metrics --- -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="+Inf"} 1 -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="1"} 1 -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="2"} 1 -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="3"} 1 -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="4"} 1 -apollo_router_http_request_duration_seconds_bucket{otel_scope_name="apollo/router",le="5"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="+Inf"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="1"} 1 
+apollo_test_histo_bucket{otel_scope_name="apollo/router",le="2"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="3"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="4"} 1 +apollo_test_histo_bucket{otel_scope_name="apollo/router",le="5"} 1 diff --git a/apollo-router/src/plugins/telemetry/testdata/config.router.yaml b/apollo-router/src/plugins/telemetry/testdata/config.router.yaml index 39bc874c65..660ca5005f 100644 --- a/apollo-router/src/plugins/telemetry/testdata/config.router.yaml +++ b/apollo-router/src/plugins/telemetry/testdata/config.router.yaml @@ -162,83 +162,3 @@ telemetry: bool_arr: - true - false - metrics: - common: - attributes: - supergraph: - static: - - name: myname - value: label_value - request: - header: - - named: test - default: default_value - rename: renamed_value - body: - - path: .data.test - name: my_new_name - default: default_value - response: - header: - - named: test - default: default_value - rename: renamed_value - - named: test - default: default_value - rename: renamed_value - body: - - path: .data.test - name: my_new_name - default: default_value - subgraph: - all: - static: - - name: myname - value: label_value - request: - header: - - named: test - default: default_value - rename: renamed_value - body: - - path: .data.test - name: my_new_name - default: default_value - response: - header: - - named: test - default: default_value - rename: renamed_value - - named: test - default: default_value - rename: renamed_value - body: - - path: .data.test - name: my_new_name - default: default_value - subgraphs: - subgraph_name_test: - static: - - name: myname - value: label_value - request: - header: - - named: test - default: default_value - rename: renamed_value - body: - - path: .data.test - name: my_new_name - default: default_value - response: - header: - - named: test - default: default_value - rename: renamed_value - - named: test - default: default_value - rename: renamed_value - body: - - path: 
.data.test - name: my_new_name - default: default_value diff --git a/apollo-router/src/plugins/telemetry/testdata/custom_attributes.router.yaml b/apollo-router/src/plugins/telemetry/testdata/custom_attributes.router.yaml index ec2045ecba..d4628b155b 100644 --- a/apollo-router/src/plugins/telemetry/testdata/custom_attributes.router.yaml +++ b/apollo-router/src/plugins/telemetry/testdata/custom_attributes.router.yaml @@ -2,51 +2,43 @@ telemetry: apollo: client_name_header: name_header client_version_header: version_header - exporters: - metrics: - common: - service_name: apollo-router - attributes: - supergraph: - static: - - name: myname - value: label_value - request: - header: - - named: test - default: default_value - rename: renamed_value - - named: another_test - default: my_default_value - response: - header: - - named: x-custom - body: - - path: .data.data.my_value - name: my_value - subgraph: - all: - errors: - include_messages: true - extensions: - - name: subgraph_error_extended_code - path: .code - - name: message - path: .reason - subgraphs: - my_subgraph_name: - request: - body: - - path: .query - name: query_from_request - - path: .data - name: unknown_data - default: default_value - - path: .data2 - name: unknown_data_bis - response: - body: - - path: .errors[0].extensions.status - name: error - context: - - named: my_key + instrumentation: + instruments: + supergraph: + http.request: + value: event_unit + type: counter + unit: count + description: "supergraph requests" + attributes: + myname: + static: label_value + renamed_value: + request_header: test + default: default_value + another_test: + request_header: another_test + default: my_default_value + x-custom: + response_header: x-custom + my_value: + response_data: $.data.my_value + error: + response_errors: $[0].message + subgraph: + http.client.request.duration: + attributes: + subgraph.name: + alias: subgraph + subgraph.graphql.document: + alias: query_from_request + status: + 
subgraph_response_status: code + subgraph_error_extended_code: + subgraph_response_errors: $[0].extensions.code + message: + error: reason + error: + subgraph_response_errors: $[0].extensions.status + my_key: + response_context: my_key \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/testdata/custom_events.router.yaml b/apollo-router/src/plugins/telemetry/testdata/custom_events.router.yaml index c3c23cb68f..cd97d0604e 100644 --- a/apollo-router/src/plugins/telemetry/testdata/custom_events.router.yaml +++ b/apollo-router/src/plugins/telemetry/testdata/custom_events.router.yaml @@ -144,4 +144,56 @@ telemetry: subgraph_response_status: code "my.custom.attribute": subgraph_response_data: "$.*" - default: "missing" \ No newline at end of file + default: "missing" + connector: + # Standard events + request: + level: info + condition: + eq: + - connector_http_request_header: x-log-request + - "log" + response: + level: warn + condition: + all: + - eq: + - connector_http_response_header: x-log-response + - "log" + - eq: + - subgraph_name: true + - "subgraph" + error: error + + # Custom events + my.disabled_request.event: + message: "my disabled event message" + level: off + on: request + my.request.event: + message: "my request event message" + level: info + on: request + condition: + eq: + - connector_http_request_header: x-log-request + - "log" + my.response.event: + message: "my response event message" + level: error + on: response + condition: + all: + - eq: + - connector_http_response_header: x-log-response + - "log" + - eq: + - 200 + - connector_http_response_status: code + - eq: + - subgraph_name: true + - "subgraph" + attributes: + subgraph.name: true + response_status: + connector_http_response_status: code \ No newline at end of file diff --git a/apollo-router/src/plugins/telemetry/testdata/prometheus_custom_buckets_specific_metrics.router.yaml 
b/apollo-router/src/plugins/telemetry/testdata/prometheus_custom_buckets_specific_metrics.router.yaml index c24056770f..23352c1cf6 100644 --- a/apollo-router/src/plugins/telemetry/testdata/prometheus_custom_buckets_specific_metrics.router.yaml +++ b/apollo-router/src/plugins/telemetry/testdata/prometheus_custom_buckets_specific_metrics.router.yaml @@ -7,7 +7,7 @@ telemetry: common: service_name: apollo-router views: - - name: apollo_router_http_request_duration_seconds + - name: apollo.test.histo unit: seconds description: duration of the http request aggregation: diff --git a/apollo-router/src/plugins/telemetry/tracing/apollo.rs b/apollo-router/src/plugins/telemetry/tracing/apollo.rs index b4f6589e37..f0100e774f 100644 --- a/apollo-router/src/plugins/telemetry/tracing/apollo.rs +++ b/apollo-router/src/plugins/telemetry/tracing/apollo.rs @@ -28,7 +28,7 @@ impl TracingConfigurator for Config { .endpoint(&self.endpoint) .otlp_endpoint(&self.experimental_otlp_endpoint) .otlp_tracing_protocol(&self.experimental_otlp_tracing_protocol) - .otlp_tracing_sampler(&self.experimental_otlp_tracing_sampler) + .otlp_tracing_sampler(&self.otlp_tracing_sampler) .apollo_key( self.apollo_key .as_ref() diff --git a/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs b/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs index 1178cde63e..63027325ec 100644 --- a/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs +++ b/apollo-router/src/plugins/telemetry/tracing/apollo_telemetry.rs @@ -95,6 +95,14 @@ use crate::query_planner::FLATTEN_SPAN_NAME; use crate::query_planner::PARALLEL_SPAN_NAME; use crate::query_planner::SEQUENCE_SPAN_NAME; use crate::query_planner::SUBSCRIBE_SPAN_NAME; +use crate::services::connector_service::APOLLO_CONNECTOR_DETAIL; +use crate::services::connector_service::APOLLO_CONNECTOR_FIELD_ALIAS; +use crate::services::connector_service::APOLLO_CONNECTOR_FIELD_NAME; +use 
crate::services::connector_service::APOLLO_CONNECTOR_FIELD_RETURN_TYPE; +use crate::services::connector_service::APOLLO_CONNECTOR_SELECTION; +use crate::services::connector_service::APOLLO_CONNECTOR_SOURCE_DETAIL; +use crate::services::connector_service::APOLLO_CONNECTOR_SOURCE_NAME; +use crate::services::connector_service::APOLLO_CONNECTOR_TYPE; pub(crate) const APOLLO_PRIVATE_REQUEST: Key = Key::from_static_str("apollo_private.request"); pub(crate) const APOLLO_PRIVATE_DURATION_NS: &str = "apollo_private.duration_ns"; @@ -154,12 +162,20 @@ const REPORTS_INCLUDE_ATTRS: [Key; 26] = [ ]; /// Additional attributes to include when sending to the OTLP protocol. -const OTLP_EXT_INCLUDE_ATTRS: [Key; 5] = [ +const OTLP_EXT_INCLUDE_ATTRS: [Key; 13] = [ OPERATION_SUBTYPE, EXT_TRACE_ID, opentelemetry_semantic_conventions::trace::HTTP_REQUEST_BODY_SIZE, opentelemetry_semantic_conventions::trace::HTTP_RESPONSE_BODY_SIZE, opentelemetry_semantic_conventions::trace::HTTP_RESPONSE_STATUS_CODE, + APOLLO_CONNECTOR_TYPE, + APOLLO_CONNECTOR_DETAIL, + APOLLO_CONNECTOR_SELECTION, + APOLLO_CONNECTOR_FIELD_NAME, + APOLLO_CONNECTOR_FIELD_ALIAS, + APOLLO_CONNECTOR_FIELD_RETURN_TYPE, + APOLLO_CONNECTOR_SOURCE_NAME, + APOLLO_CONNECTOR_SOURCE_DETAIL, ]; const REPORTS_INCLUDE_SPANS: [&str; 16] = [ diff --git a/apollo-router/src/plugins/telemetry/tracing/datadog/mod.rs b/apollo-router/src/plugins/telemetry/tracing/datadog/mod.rs index fd1b4447ae..f182704f61 100644 --- a/apollo-router/src/plugins/telemetry/tracing/datadog/mod.rs +++ b/apollo-router/src/plugins/telemetry/tracing/datadog/mod.rs @@ -201,7 +201,7 @@ impl TracingConfigurator for Config { .to_string(), ) .with_http_client( - reqwest::Client::builder() + reqwest_0_11::Client::builder() // https://github.com/open-telemetry/opentelemetry-rust-contrib/issues/7 // Set the idle timeout to something low to prevent termination of connections. 
.pool_idle_timeout(Duration::from_millis(1)) diff --git a/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/mod.rs b/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/mod.rs index 43b5a5d75d..d8d8c592c0 100644 --- a/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/mod.rs +++ b/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/mod.rs @@ -8,9 +8,7 @@ use std::sync::Arc; use std::time::Duration; use futures::future::BoxFuture; -use http::Method; -use http::Request; -use http::Uri; +use http_0_2 as http; pub use model::ApiVersion; pub use model::Error; pub use model::FieldMappingFn; @@ -74,7 +72,7 @@ impl Mapping { /// Datadog span exporter pub struct DatadogExporter { client: Arc, - request_url: Uri, + request_url: http_0_2::Uri, model_config: ModelConfig, api_version: ApiVersion, mapping: Mapping, @@ -84,7 +82,7 @@ pub struct DatadogExporter { impl DatadogExporter { fn new( model_config: ModelConfig, - request_url: Uri, + request_url: http::Uri, api_version: ApiVersion, client: Arc, mapping: Mapping, @@ -112,8 +110,8 @@ impl DatadogExporter { &self.mapping, &self.unified_tags, )?; - let req = Request::builder() - .method(Method::POST) + let req = http::Request::builder() + .method(http::Method::POST) .uri(self.request_url.clone()) .header(http::header::CONTENT_TYPE, self.api_version.content_type()) .header(DATADOG_TRACE_COUNT_HEADER, trace_count) @@ -236,7 +234,7 @@ impl DatadogPipelineBuilder { // parse the endpoint and append the path based on versions. // keep the query and host the same. 
- fn build_endpoint(agent_endpoint: &str, version: &str) -> Result { + fn build_endpoint(agent_endpoint: &str, version: &str) -> Result { // build agent endpoint based on version let mut endpoint = agent_endpoint .parse::() @@ -509,7 +507,7 @@ mod tests { impl HttpClient for DummyClient { async fn send( &self, - _request: Request>, + _request: http::Request>, ) -> Result, opentelemetry_http::HttpError> { Ok(http::Response::new("dummy response".into())) } diff --git a/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/model/mod.rs b/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/model/mod.rs index d6db4b72b4..f42962cd84 100644 --- a/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/model/mod.rs +++ b/apollo-router/src/plugins/telemetry/tracing/datadog_exporter/exporter/model/mod.rs @@ -1,6 +1,7 @@ use std::fmt::Debug; use http::uri; +use http_0_2 as http; use opentelemetry_sdk::export::trace::SpanData; use opentelemetry_sdk::export::trace::{self}; use opentelemetry_sdk::export::ExportError; diff --git a/apollo-router/src/plugins/telemetry/tracing/mod.rs b/apollo-router/src/plugins/telemetry/tracing/mod.rs index d2dc62b138..8b4b72ddd6 100644 --- a/apollo-router/src/plugins/telemetry/tracing/mod.rs +++ b/apollo-router/src/plugins/telemetry/tracing/mod.rs @@ -16,6 +16,7 @@ use serde::Deserialize; use tower::BoxError; use super::config_new::spans::Spans; +use super::formatters::APOLLO_CONNECTOR_PREFIX; use super::formatters::APOLLO_PRIVATE_PREFIX; use crate::plugins::telemetry::config::TracingCommon; use crate::plugins::telemetry::tracing::datadog::DatadogSpanProcessor; @@ -51,17 +52,19 @@ impl SpanProcessor for ApolloFilterSpanProcessor { } fn on_end(&self, span: SpanData) { - if span - .attributes - .iter() - .any(|(key, _)| key.as_str().starts_with(APOLLO_PRIVATE_PREFIX)) - { + if span.attributes.iter().any(|(key, _)| { + key.as_str().starts_with(APOLLO_PRIVATE_PREFIX) + || 
key.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) + }) { let attributes_len = span.attributes.len(); let span = SpanData { attributes: span .attributes .into_iter() - .filter(|(k, _)| !k.as_str().starts_with(APOLLO_PRIVATE_PREFIX)) + .filter(|(k, _)| { + !k.as_str().starts_with(APOLLO_PRIVATE_PREFIX) + && !k.as_str().starts_with(APOLLO_CONNECTOR_PREFIX) + }) .fold( EvictedHashMap::new(attributes_len as u32, attributes_len), |mut m, (k, v)| { diff --git a/apollo-router/src/plugins/traffic_shaping/mod.rs b/apollo-router/src/plugins/traffic_shaping/mod.rs index c0dffdcb6f..695018587d 100644 --- a/apollo-router/src/plugins/traffic_shaping/mod.rs +++ b/apollo-router/src/plugins/traffic_shaping/mod.rs @@ -22,6 +22,7 @@ use http::HeaderValue; use http::StatusCode; use schemars::JsonSchema; use serde::Deserialize; +use tower::util::future::EitherResponseFuture; use tower::util::Either; use tower::BoxError; use tower::Service; @@ -227,8 +228,8 @@ impl Plugin for TrafficShaping { } } -pub(crate) type TrafficShapingSubgraphFuture = Either< - Either< +pub(crate) type TrafficShapingSubgraphFuture = EitherResponseFuture< + EitherResponseFuture< BoxFuture<'static, Result>, BoxFuture<'static, Result>, >, @@ -344,8 +345,7 @@ impl TrafficShaping { .clone() }); - Either::A(ServiceBuilder::new() - + Either::Left(ServiceBuilder::new() .option_layer(config.shaping.deduplicate_query.unwrap_or_default().then( QueryDeduplicationLayer::default )) @@ -390,7 +390,7 @@ impl TrafficShaping { req })) } else { - Either::B(service) + Either::Right(service) } } diff --git a/apollo-router/src/protocols/websocket.rs b/apollo-router/src/protocols/websocket.rs index 13700e84ce..de49015cc9 100644 --- a/apollo-router/src/protocols/websocket.rs +++ b/apollo-router/src/protocols/websocket.rs @@ -1,4 +1,3 @@ -use std::borrow::Cow; use std::pin::Pin; use std::task::Poll; use std::time::Duration; @@ -344,12 +343,12 @@ where ClientMessage::CloseWebsocket => { future::ready(Ok(Message::Close(Some(CloseFrame{ 
code: CloseCode::Normal, - reason: Cow::default(), + reason: Default::default(), })))) }, message => { future::ready(match serde_json::to_string(&message) { - Ok(client_message_str) => Ok(Message::Text(client_message_str)), + Ok(client_message_str) => Ok(Message::text(client_message_str)), Err(err) => Err(Error::SerdeError(err)), }) }, @@ -689,7 +688,6 @@ mod tests { use axum::extract::WebSocketUpgrade; use axum::routing::get; use axum::Router; - use axum::Server; use futures::FutureExt; use http::HeaderValue; use tokio_tungstenite::connect_async; @@ -705,7 +703,7 @@ mod tests { port: Option, ) -> SocketAddr { let ws_handler = move |ws: WebSocketUpgrade| async move { - let res = ws.on_upgrade(move |mut socket| async move { + let res = ws.protocols(["graphql-transport-ws"]).on_upgrade(move |mut socket| async move { let connection_ack = socket.recv().await.unwrap().unwrap().into_text().unwrap(); let ack_msg: ClientMessage = serde_json::from_str(&connection_ack).unwrap(); if let ClientMessage::ConnectionInit { payload } = ack_msg { @@ -719,7 +717,7 @@ mod tests { if send_ping { // It turns out some servers may send Pings before they even ack the connection. 
socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Ping { payload: None }).unwrap(), )) .await @@ -730,7 +728,7 @@ mod tests { } socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::ConnectionAck).unwrap(), )) .await @@ -753,9 +751,7 @@ mod tests { } socket - .send(AxumWsMessage::Text( - "coucou".to_string(), - )) + .send(AxumWsMessage::text("coucou")) .await .unwrap(); @@ -768,7 +764,7 @@ mod tests { tokio::time::sleep(duration).await; let ping_message = socket.next().await.unwrap().unwrap(); - assert_eq!(ping_message, AxumWsMessage::Text( + assert_eq!(ping_message, AxumWsMessage::text( serde_json::to_string(&ClientMessage::Ping { payload: None }).unwrap(), )); @@ -780,38 +776,38 @@ mod tests { } socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Next { id: client_id.clone().unwrap(), payload: graphql::Response::builder().data(serde_json_bytes::json!({"userWasCreated": {"username": "ada_lovelace"}})).build() }).unwrap(), )) .await .unwrap(); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Ping { payload: None }).unwrap(), )) .await .unwrap(); let pong_message = socket.next().await.unwrap().unwrap(); - assert_eq!(pong_message, AxumWsMessage::Text( + assert_eq!(pong_message, AxumWsMessage::text( serde_json::to_string(&ClientMessage::Pong { payload: None }).unwrap(), )); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Ping { payload: None }).unwrap(), )) .await .unwrap(); let pong_message = socket.next().await.unwrap().unwrap(); - assert_eq!(pong_message, AxumWsMessage::Text( + assert_eq!(pong_message, AxumWsMessage::text( serde_json::to_string(&ClientMessage::Pong { payload: None }).unwrap(), )); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Complete 
{ id: client_id.unwrap() }).unwrap(), )) .await @@ -827,13 +823,12 @@ mod tests { }; let app = Router::new().route("/ws", get(ws_handler)); - let server = Server::bind( - &format!("127.0.0.1:{}", port.unwrap_or_default()) - .parse() - .unwrap(), - ) - .serve(app.into_make_service()); - let local_addr = server.local_addr(); + let listener = + tokio::net::TcpListener::bind(format!("127.0.0.1:{}", port.unwrap_or_default())) + .await + .unwrap(); + let server = axum::serve(listener, app); + let local_addr = server.local_addr().unwrap(); tokio::spawn(async { server.await.unwrap() }); local_addr } @@ -843,7 +838,7 @@ mod tests { port: Option, ) -> SocketAddr { let ws_handler = move |ws: WebSocketUpgrade| async move { - let res = ws.on_upgrade(move |mut socket| async move { + let res = ws.protocols(["graphql-ws"]).on_upgrade(move |mut socket| async move { let init_connection = socket.recv().await.unwrap().unwrap().into_text().unwrap(); let init_msg: ClientMessage = serde_json::from_str(&init_connection).unwrap(); assert!(matches!(init_msg, ClientMessage::ConnectionInit { .. })); @@ -851,7 +846,7 @@ mod tests { if send_ping { // It turns out some servers may send Pings before they even ack the connection. 
socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Ping { payload: None }).unwrap(), )) .await @@ -861,13 +856,13 @@ mod tests { assert!(matches!(pong_message, ClientMessage::Pong { payload: None })); } socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::ConnectionAck).unwrap(), )) .await .unwrap(); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::KeepAlive).unwrap(), )) .await @@ -890,20 +885,18 @@ mod tests { } socket - .send(AxumWsMessage::Text( - "coucou".to_string(), - )) + .send(AxumWsMessage::text("coucou")) .await .unwrap(); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::Next { id: client_id.clone().unwrap(), payload: graphql::Response::builder().data(serde_json_bytes::json!({"userWasCreated": {"username": "ada_lovelace"}})).build() }).unwrap(), )) .await .unwrap(); socket - .send(AxumWsMessage::Text( + .send(AxumWsMessage::text( serde_json::to_string(&ServerMessage::KeepAlive).unwrap(), )) .await @@ -924,13 +917,12 @@ mod tests { }; let app = Router::new().route("/ws", get(ws_handler)); - let server = Server::bind( - &format!("127.0.0.1:{}", port.unwrap_or_default()) - .parse() - .unwrap(), - ) - .serve(app.into_make_service()); - let local_addr = server.local_addr(); + let listener = + tokio::net::TcpListener::bind(format!("127.0.0.1:{}", port.unwrap_or_default())) + .await + .unwrap(); + let server = axum::serve(listener, app); + let local_addr = server.local_addr().unwrap(); tokio::spawn(async { server.await.unwrap() }); local_addr } @@ -958,7 +950,7 @@ mod tests { let socket_addr = emulate_correct_websocket_server_new_protocol(send_ping, heartbeat_interval, port) .await; - let url = url::Url::parse(format!("ws://{}/ws", socket_addr).as_str()).unwrap(); + let url = format!("ws://{}/ws", socket_addr); let mut request = 
url.into_client_request().unwrap(); request.headers_mut().insert( http::header::SEC_WEBSOCKET_PROTOCOL, @@ -1025,7 +1017,7 @@ mod tests { async fn test_ws_connection_old_proto(send_ping: bool, port: Option) { let socket_addr = emulate_correct_websocket_server_old_protocol(send_ping, port).await; - let url = url::Url::parse(format!("ws://{}/ws", socket_addr).as_str()).unwrap(); + let url = format!("ws://{}/ws", socket_addr); let mut request = url.into_client_request().unwrap(); request.headers_mut().insert( http::header::SEC_WEBSOCKET_PROTOCOL, diff --git a/apollo-router/src/query_planner/execution.rs b/apollo-router/src/query_planner/execution.rs index 6ea1e1c280..84fb19bb95 100644 --- a/apollo-router/src/query_planner/execution.rs +++ b/apollo-router/src/query_planner/execution.rs @@ -6,6 +6,7 @@ use futures::prelude::*; use tokio::sync::broadcast; use tokio::sync::mpsc; use tokio_stream::wrappers::BroadcastStream; +use tower::ServiceExt; use tracing::Instrument; use super::log; @@ -22,7 +23,9 @@ use crate::json_ext::Path; use crate::json_ext::Value; use crate::json_ext::ValueExt; use crate::plugins::subscription::SubscriptionConfig; +use crate::query_planner::fetch::FetchNode; use crate::query_planner::fetch::SubgraphSchemas; +use crate::query_planner::fetch::Variables; use crate::query_planner::FlattenNode; use crate::query_planner::Primary; use crate::query_planner::CONDITION_ELSE_SPAN_NAME; @@ -31,12 +34,15 @@ use crate::query_planner::CONDITION_SPAN_NAME; use crate::query_planner::DEFER_DEFERRED_SPAN_NAME; use crate::query_planner::DEFER_PRIMARY_SPAN_NAME; use crate::query_planner::DEFER_SPAN_NAME; -use crate::query_planner::FETCH_SPAN_NAME; use crate::query_planner::FLATTEN_SPAN_NAME; use crate::query_planner::PARALLEL_SPAN_NAME; use crate::query_planner::SEQUENCE_SPAN_NAME; -use crate::query_planner::SUBSCRIBE_SPAN_NAME; -use crate::services::SubgraphServiceFactory; +use crate::services::fetch; +use crate::services::fetch::ErrorMapping; +use 
crate::services::fetch::SubscriptionRequest; +use crate::services::fetch_service::FetchServiceFactory; +use crate::services::new_service::ServiceFactory; +use crate::services::FetchRequest; use crate::spec::Query; use crate::spec::Schema; use crate::Context; @@ -47,7 +53,7 @@ impl QueryPlan { pub(crate) async fn execute<'a>( &self, context: &'a Context, - service_factory: &'a Arc, + service_factory: &'a Arc, supergraph_request: &'a Arc>, schema: &'a Arc, subgraph_schemas: &'a Arc, @@ -104,7 +110,7 @@ impl QueryPlan { // holds the query plan executon arguments that do not change between calls pub(crate) struct ExecutionParameters<'a> { pub(crate) context: &'a Context, - pub(crate) service_factory: &'a Arc, + pub(crate) service_factory: &'a Arc, pub(crate) schema: &'a Arc, pub(crate) subgraph_schemas: &'a Arc, pub(crate) supergraph_request: &'a Arc>, @@ -201,33 +207,63 @@ impl PlanNode { value = v; errors = err; } - PlanNode::Subscription { primary, .. } => { - if parameters.subscription_handle.is_some() { - let fetch_time_offset = - parameters.context.created_at.elapsed().as_nanos() as i64; - errors = primary - .execute_recursively(parameters, current_dir, parent_value, sender) - .instrument(tracing::info_span!( - SUBSCRIBE_SPAN_NAME, - "otel.kind" = "INTERNAL", - "apollo.subgraph.name" = primary.service_name.as_ref(), - "apollo_private.sent_time_offset" = fetch_time_offset - )) - .await; - } else { + PlanNode::Subscription { + primary: subscription_node, + .. 
+ } => { + if parameters.subscription_handle.is_none() { tracing::error!("No subscription handle provided for a subscription"); + value = Value::default(); errors = vec![Error::builder() .message("no subscription handle provided for a subscription") .extension_code("NO_SUBSCRIPTION_HANDLE") .build()]; - }; - - value = Value::default(); + } else { + match Variables::new( + &[], + &subscription_node.variable_usages, + parent_value, + current_dir, + parameters.supergraph_request, + parameters.schema, + &subscription_node.input_rewrites, + &None, + ) { + Some(variables) => { + let service = parameters.service_factory.create(); + let request = fetch::Request::Subscription( + SubscriptionRequest::builder() + .context(parameters.context.clone()) + .subscription_node(subscription_node.clone()) + .supergraph_request(parameters.supergraph_request.clone()) + .variables(variables) + .current_dir(current_dir.clone()) + .sender(sender) + .and_subscription_handle( + parameters.subscription_handle.clone(), + ) + .and_subscription_config( + parameters.subscription_config.clone(), + ) + .build(), + ); + (value, errors) = + match service.oneshot(request).await.map_to_graphql_error( + subscription_node.service_name.to_string(), + current_dir, + ) { + Ok(r) => r, + Err(e) => (Value::default(), vec![e]), + }; + } + None => { + value = Value::Object(Object::default()); + errors = Vec::new(); + } + }; + } } PlanNode::Fetch(fetch_node) => { - let fetch_time_offset = - parameters.context.created_at.elapsed().as_nanos() as i64; - // The client closed the connection, we are still executing the request pipeline, // but we won't send unused trafic to subgraph if parameters @@ -238,17 +274,48 @@ impl PlanNode { value = Value::Object(Object::default()); errors = Vec::new(); } else { - let (v, e) = fetch_node - .fetch_node(parameters, parent_value, current_dir) - .instrument(tracing::info_span!( - FETCH_SPAN_NAME, - "otel.kind" = "INTERNAL", - "apollo.subgraph.name" = 
fetch_node.service_name.as_ref(), - "apollo_private.sent_time_offset" = fetch_time_offset - )) - .await; - value = v; - errors = e; + match Variables::new( + &fetch_node.requires, + &fetch_node.variable_usages, + parent_value, + current_dir, + parameters.supergraph_request, + parameters.schema.as_ref(), + &fetch_node.input_rewrites, + &fetch_node.context_rewrites, + ) { + Some(variables) => { + let service = parameters.service_factory.create(); + let request = fetch::Request::Fetch( + FetchRequest::builder() + .context(parameters.context.clone()) + .fetch_node(fetch_node.clone()) + .supergraph_request(parameters.supergraph_request.clone()) + .variables(variables) + .current_dir(current_dir.clone()) + .build(), + ); + (value, errors) = + match service.oneshot(request).await.map_to_graphql_error( + fetch_node.service_name.to_string(), + current_dir, + ) { + Ok(r) => r, + Err(e) => (Value::default(), vec![e]), + }; + FetchNode::deferred_fetches( + current_dir, + &fetch_node.id, + parameters.deferred_fetches, + &value, + &errors, + ); + } + None => { + value = Value::Object(Object::default()); + errors = Vec::new(); + } + }; } } PlanNode::Defer { diff --git a/apollo-router/src/query_planner/fetch.rs b/apollo-router/src/query_planner/fetch.rs index b8d64fa2c5..849c98b505 100644 --- a/apollo-router/src/query_planner/fetch.rs +++ b/apollo-router/src/query_planner/fetch.rs @@ -9,15 +9,16 @@ use apollo_compiler::Name; use indexmap::IndexSet; use serde::Deserialize; use serde::Serialize; +use serde_json_bytes::ByteString; +use serde_json_bytes::Map; +use tokio::sync::broadcast::Sender; use tower::ServiceExt; use tracing::instrument; use tracing::Instrument; -use super::execution::ExecutionParameters; use super::rewrites; use super::selection::execute_selection_set; use super::selection::Selection; -use super::subgraph_context::build_operation_with_aliasing; use super::subgraph_context::ContextualArguments; use super::subgraph_context::SubgraphContext; use 
crate::error::Error; @@ -25,7 +26,6 @@ use crate::error::FetchError; use crate::error::ValidationErrors; use crate::graphql; use crate::graphql::Request; -use crate::http_ext; use crate::json_ext; use crate::json_ext::Object; use crate::json_ext::Path; @@ -33,6 +33,8 @@ use crate::json_ext::Value; use crate::json_ext::ValueExt; use crate::plugins::authorization::AuthorizationPlugin; use crate::plugins::authorization::CacheKeyMetadata; +use crate::services::fetch::ErrorMapping; +use crate::services::subgraph::BoxService; use crate::services::SubgraphRequest; use crate::spec::query::change::QueryHashVisitor; use crate::spec::Schema; @@ -269,6 +271,7 @@ impl Display for QueryHash { } } +#[derive(Default)] pub(crate) struct Variables { pub(crate) variables: Object, pub(crate) inverted_paths: Vec>, @@ -278,7 +281,7 @@ pub(crate) struct Variables { impl Variables { #[instrument(skip_all, level = "debug", name = "make_variables")] #[allow(clippy::too_many_arguments)] - pub(super) fn new( + pub(crate) fn new( requires: &[Selection], variable_usages: &[Arc], data: &Value, @@ -372,176 +375,68 @@ impl Variables { impl FetchNode { #[allow(clippy::too_many_arguments)] - pub(crate) async fn fetch_node<'a>( - &'a self, - parameters: &'a ExecutionParameters<'a>, - data: &'a Value, - current_dir: &'a Path, + pub(crate) async fn subgraph_fetch( + &self, + service: BoxService, + subgraph_request: SubgraphRequest, + current_dir: &Path, + schema: &Schema, + paths: Vec>, + operation_str: &str, + variables: Map, ) -> (Value, Vec) { - let FetchNode { - operation, - operation_kind, - operation_name, - service_name, - .. 
- } = self; - - let Variables { - variables, - inverted_paths: paths, - contextual_arguments, - } = match Variables::new( - &self.requires, - &self.variable_usages, - data, - current_dir, - // Needs the original request here - parameters.supergraph_request, - parameters.schema, - &self.input_rewrites, - &self.context_rewrites, - ) { - Some(variables) => variables, - None => { - return (Value::Object(Object::default()), Vec::new()); - } - }; - - let alias_query_string; // this exists outside the if block to allow the as_str() to be longer lived - let aliased_operation = if let Some(ctx_arg) = contextual_arguments { - if let Some(subgraph_schema) = - parameters.subgraph_schemas.get(&service_name.to_string()) - { - match build_operation_with_aliasing(operation, &ctx_arg, &subgraph_schema.schema) { - Ok(op) => { - alias_query_string = op.serialize().no_indent().to_string(); - alias_query_string.as_str() - } - Err(errors) => { - tracing::debug!( - "couldn't generate a valid executable document? 
{:?}", - errors - ); - operation.as_serialized() - } - } - } else { - tracing::debug!( - "couldn't find a subgraph schema for service {:?}", - &service_name - ); - operation.as_serialized() - } - } else { - operation.as_serialized() - }; - - let mut subgraph_request = SubgraphRequest::builder() - .supergraph_request(parameters.supergraph_request.clone()) - .subgraph_request( - http_ext::Request::builder() - .method(http::Method::POST) - .uri( - parameters - .schema - .subgraph_url(service_name) - .unwrap_or_else(|| { - panic!( - "schema uri for subgraph '{service_name}' should already have been checked" - ) - }) - .clone(), - ) - .body( - Request::builder() - .query(aliased_operation) - .and_operation_name(operation_name.as_ref().map(|n| n.to_string())) - .variables(variables.clone()) - .build(), - ) - .build() - .expect("it won't fail because the url is correct and already checked; qed"), - ) - .subgraph_name(self.service_name.to_string()) - .operation_kind(*operation_kind) - .context(parameters.context.clone()) - .build(); - subgraph_request.query_hash = self.schema_aware_hash.clone(); - subgraph_request.authorization = self.authorization.clone(); - - let service = parameters - .service_factory - .create(service_name) - .expect("we already checked that the service exists during planning; qed"); - let (_parts, response) = match service .oneshot(subgraph_request) .instrument(tracing::trace_span!("subfetch_stream")) .await - // TODO this is a problem since it restores details about failed service - // when errors have been redacted in the include_subgraph_errors module. - // Unfortunately, not easy to fix here, because at this point we don't - // know if we should be redacting errors for this subgraph... - .map_err(|e| match e.downcast::() { - Ok(inner) => match *inner { - FetchError::SubrequestHttpError { .. 
} => *inner, - _ => FetchError::SubrequestHttpError { - status_code: None, - service: service_name.to_string(), - reason: inner.to_string(), - }, - }, - Err(e) => FetchError::SubrequestHttpError { - status_code: None, - service: service_name.to_string(), - reason: e.to_string(), - }, - }) { + .map_to_graphql_error(self.service_name.to_string(), current_dir) + { Err(e) => { - return ( - Value::default(), - vec![e.to_graphql_error(Some(current_dir.to_owned()))], - ); + return (Value::default(), vec![e]); } Ok(res) => res.response.into_parts(), }; - super::log::trace_subfetch( - service_name, - operation.as_serialized(), - &variables, - &response, - ); + super::log::trace_subfetch(&self.service_name, operation_str, &variables, &response); if !response.is_primary() { return ( Value::default(), vec![FetchError::SubrequestUnexpectedPatchResponse { - service: service_name.to_string(), + service: self.service_name.to_string(), } .to_graphql_error(Some(current_dir.to_owned()))], ); } - let (value, errors) = - self.response_at_path(parameters.schema, current_dir, paths, response); - if let Some(id) = &self.id { - if let Some(sender) = parameters.deferred_fetches.get(id.as_str()) { + let (value, errors) = self.response_at_path(schema, current_dir, paths, response); + + (value, errors) + } + + pub(crate) fn deferred_fetches( + current_dir: &Path, + id: &Option, + deferred_fetches: &std::collections::HashMap)>>, + value: &Value, + errors: &[Error], + ) { + if let Some(id) = id { + if let Some(sender) = deferred_fetches.get(id.as_str()) { u64_counter!( "apollo.router.operations.defer.fetch", "Number of deferred responses fetched from subgraphs", 1 ); - if let Err(e) = sender.clone().send((value.clone(), errors.clone())) { - tracing::error!("error sending fetch result at path {} and id {:?} for deferred response building: {}", current_dir, self.id, e); + if let Err(e) = sender.clone().send((value.clone(), Vec::from(errors))) { + tracing::error!("error sending fetch result at path 
{} and id {:?} for deferred response building: {}", current_dir, id, e); } } } - (value, errors) } #[instrument(skip_all, level = "debug", name = "response_insert")] - fn response_at_path<'a>( + pub(crate) fn response_at_path<'a>( &'a self, schema: &Schema, current_dir: &'a Path, diff --git a/apollo-router/src/query_planner/mod.rs b/apollo-router/src/query_planner/mod.rs index 13b1b36af2..3a739e1101 100644 --- a/apollo-router/src/query_planner/mod.rs +++ b/apollo-router/src/query_planner/mod.rs @@ -6,6 +6,7 @@ pub(crate) use caching_query_planner::*; pub use plan::QueryPlan; pub(crate) use plan::*; pub(crate) use query_planner_service::*; +pub(crate) use subgraph_context::build_operation_with_aliasing; pub use self::fetch::OperationKind; diff --git a/apollo-router/src/query_planner/plan.rs b/apollo-router/src/query_planner/plan.rs index 472f246b5f..9b2b2f295e 100644 --- a/apollo-router/src/query_planner/plan.rs +++ b/apollo-router/src/query_planner/plan.rs @@ -2,6 +2,7 @@ use std::sync::atomic::AtomicUsize; use std::sync::atomic::Ordering; use std::sync::Arc; +use apollo_compiler::collections::HashSet; use apollo_compiler::validation::Valid; use serde::Deserialize; use serde::Serialize; @@ -504,6 +505,51 @@ impl PlanNode { } } + /// A version of `service_usage` that doesn't use recursion + /// and returns a `HashSet` instead of an `Iterator`. 
+ pub(crate) fn service_usage_set(&self) -> HashSet<&str> { + let mut services = HashSet::default(); + let mut stack = vec![self]; + while let Some(node) = stack.pop() { + match node { + Self::Sequence { nodes } | Self::Parallel { nodes } => { + stack.extend(nodes.iter()); + } + Self::Fetch(fetch) => { + services.insert(fetch.service_name.as_ref()); + } + Self::Subscription { primary, rest } => { + services.insert(primary.service_name.as_ref()); + if let Some(rest) = rest { + stack.push(rest); + } + } + Self::Flatten(flatten) => { + stack.push(&flatten.node); + } + Self::Defer { primary, deferred } => { + if let Some(primary) = primary.node.as_ref() { + stack.push(primary); + } + stack.extend(deferred.iter().flat_map(|d| d.node.as_deref())); + } + Self::Condition { + if_clause, + else_clause, + .. + } => { + if let Some(if_clause) = if_clause { + stack.push(if_clause); + } + if let Some(else_clause) = else_clause { + stack.push(else_clause); + } + } + } + } + services + } + pub(crate) fn extract_authorization_metadata( &mut self, schema: &Valid, diff --git a/apollo-router/src/query_planner/query_planner_service.rs b/apollo-router/src/query_planner/query_planner_service.rs index 8be6890a69..0b22fde199 100644 --- a/apollo-router/src/query_planner/query_planner_service.rs +++ b/apollo-router/src/query_planner/query_planner_service.rs @@ -14,9 +14,9 @@ use apollo_federation::error::SingleFederationError; use apollo_federation::query_plan::query_planner::QueryPlanOptions; use apollo_federation::query_plan::query_planner::QueryPlanner; use futures::future::BoxFuture; +use opentelemetry::KeyValue; use opentelemetry_api::metrics::MeterProvider as _; use opentelemetry_api::metrics::ObservableGauge; -use opentelemetry_api::KeyValue; use serde_json_bytes::Value; use tower::Service; diff --git a/apollo-router/src/query_planner/subscription.rs b/apollo-router/src/query_planner/subscription.rs index f390ba1723..32b2aa8c2b 100644 --- 
a/apollo-router/src/query_planner/subscription.rs +++ b/apollo-router/src/query_planner/subscription.rs @@ -1,30 +1,14 @@ use std::sync::atomic::AtomicUsize; -use std::sync::atomic::Ordering; use std::sync::Arc; -use futures::future; use serde::Deserialize; use serde::Serialize; -use serde_json_bytes::Value; use tokio::sync::broadcast; -use tokio::sync::mpsc; -use tower::ServiceExt; -use tracing_futures::Instrument; -use super::execution::ExecutionParameters; use super::fetch::SubgraphSchemas; -use super::fetch::Variables; use super::rewrites; use super::OperationKind; -use crate::error::FetchError; use crate::error::ValidationErrors; -use crate::graphql::Error; -use crate::graphql::Request; -use crate::graphql::Response; -use crate::http_ext; -use crate::json_ext::Path; -use crate::services::subgraph::BoxGqlStream; -use crate::services::SubgraphRequest; use crate::services::SubscriptionTaskParams; pub(crate) const SUBSCRIPTION_EVENT_SPAN_NAME: &str = "subscription_event"; @@ -81,200 +65,6 @@ pub(crate) struct SubscriptionNode { } impl SubscriptionNode { - pub(crate) fn execute_recursively<'a>( - &'a self, - parameters: &'a ExecutionParameters<'a>, - current_dir: &'a Path, - parent_value: &'a Value, - sender: tokio::sync::mpsc::Sender, - ) -> future::BoxFuture<'a, Vec> { - if parameters.subscription_handle.is_none() { - tracing::error!("No subscription handle provided for a subscription"); - return Box::pin(async { - vec![Error::builder() - .message("no subscription handle provided for a subscription") - .extension_code("NO_SUBSCRIPTION_HANDLE") - .build()] - }); - }; - if let Some(max_opened_subscriptions) = parameters - .subscription_config - .as_ref() - .and_then(|s| s.max_opened_subscriptions) - { - if OPENED_SUBSCRIPTIONS.load(Ordering::Relaxed) >= max_opened_subscriptions { - return Box::pin(async { - vec![Error::builder() - .message("can't open new subscription, limit reached") - .extension_code("SUBSCRIPTION_MAX_LIMIT") - .build()] - }); - } - } - let 
subscription_handle = parameters - .subscription_handle - .as_ref() - .expect("checked above; qed"); - let mode = match parameters.subscription_config.as_ref() { - Some(config) => config - .mode - .get_subgraph_config(&self.service_name) - .map(|mode| (config.clone(), mode)), - None => { - return Box::pin(async { - vec![Error::builder() - .message("subscription support is not enabled") - .extension_code("SUBSCRIPTION_DISABLED") - .build()] - }); - } - }; - - Box::pin(async move { - let mut subscription_handle = subscription_handle.clone(); - - match mode { - Some((subscription_config, _mode)) => { - let (tx_handle, rx_handle) = mpsc::channel::(1); - - let subscription_conf_tx = match subscription_handle.subscription_conf_tx.take() - { - Some(sc) => sc, - None => { - return vec![Error::builder() - .message("no subscription conf sender provided for a subscription") - .extension_code("NO_SUBSCRIPTION_CONF_TX") - .build()]; - } - }; - - let subs_params = SubscriptionTaskParams { - client_sender: sender, - subscription_handle, - subscription_config, - stream_rx: rx_handle.into(), - service_name: self.service_name.to_string(), - }; - - if let Err(err) = subscription_conf_tx.send(subs_params).await { - return vec![Error::builder() - .message(format!("cannot send the subscription data: {err:?}")) - .extension_code("SUBSCRIPTION_DATA_SEND_ERROR") - .build()]; - } - - match self - .subgraph_call(parameters, current_dir, parent_value, tx_handle) - .await - { - Ok(e) => e, - Err(err) => { - failfast_error!("subgraph call fetch error: {}", err); - vec![err.to_graphql_error(Some(current_dir.to_owned()))] - } - } - } - None => { - vec![Error::builder() - .message(format!( - "subscription mode is not configured for subgraph {:?}", - self.service_name - )) - .extension_code("INVALID_SUBSCRIPTION_MODE") - .build()] - } - } - }) - } - - pub(crate) async fn subgraph_call<'a>( - &'a self, - parameters: &'a ExecutionParameters<'a>, - current_dir: &'a Path, - data: &Value, - tx_gql: 
mpsc::Sender, - ) -> Result, FetchError> { - let SubscriptionNode { - operation, - operation_name, - service_name, - .. - } = self; - - let Variables { variables, .. } = match Variables::new( - &[], - &self.variable_usages, - data, - current_dir, - // Needs the original request here - parameters.supergraph_request, - parameters.schema, - &self.input_rewrites, - &None, - ) { - Some(variables) => variables, - None => { - return Ok(Vec::new()); - } - }; - - let subgraph_request = SubgraphRequest::builder() - .supergraph_request(parameters.supergraph_request.clone()) - .subgraph_request( - http_ext::Request::builder() - .method(http::Method::POST) - .uri( - parameters - .schema - .subgraph_url(service_name) - .unwrap_or_else(|| { - panic!( - "schema uri for subgraph '{service_name}' should already have been checked" - ) - }) - .clone(), - ) - .body( - Request::builder() - .query(operation.as_serialized()) - .and_operation_name(operation_name.as_ref().map(|n| n.to_string())) - .variables(variables.clone()) - .build(), - ) - .build() - .expect("it won't fail because the url is correct and already checked; qed"), - ) - .operation_kind(OperationKind::Subscription) - .context(parameters.context.clone()) - .subgraph_name(self.service_name.to_string()) - .subscription_stream(tx_gql) - .and_connection_closed_signal(parameters.subscription_handle.as_ref().map(|s| s.closed_signal.resubscribe())) - .build(); - - let service = parameters - .service_factory - .create(service_name) - .expect("we already checked that the service exists during planning; qed"); - - let (_parts, response) = service - .oneshot(subgraph_request) - .instrument(tracing::trace_span!("subscription_call")) - .await - // TODO this is a problem since it restores details about failed service - // when errors have been redacted in the include_subgraph_errors module. - // Unfortunately, not easy to fix here, because at this point we don't - // know if we should be redacting errors for this subgraph... 
- .map_err(|e| FetchError::SubrequestHttpError { - service: service_name.to_string(), - reason: e.to_string(), - status_code: None, - })? - .response - .into_parts(); - - Ok(response.errors) - } - pub(crate) fn init_parsed_operation( &mut self, subgraph_schemas: &SubgraphSchemas, diff --git a/apollo-router/src/query_planner/tests.rs b/apollo-router/src/query_planner/tests.rs index d7cfaa4e36..4494f442bf 100644 --- a/apollo-router/src/query_planner/tests.rs +++ b/apollo-router/src/query_planner/tests.rs @@ -26,6 +26,8 @@ use crate::plugin::test::MockSubgraph; use crate::query_planner; use crate::query_planner::fetch::FetchNode; use crate::query_planner::fetch::SubgraphOperation; +use crate::services::connector_service::ConnectorServiceFactory; +use crate::services::fetch_service::FetchServiceFactory; use crate::services::subgraph_service::MakeSubgraphService; use crate::services::supergraph; use crate::services::SubgraphResponse; @@ -103,20 +105,28 @@ async fn mock_subgraph_service_withf_panics_should_be_reported_as_service_closed }); let (sender, _) = tokio::sync::mpsc::channel(10); - let sf = Arc::new(SubgraphServiceFactory { - services: Arc::new(HashMap::from([( - "product".into(), - Arc::new(mock_products_service) as Arc, - )])), - plugins: Default::default(), - }); + + let schema = Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()); + let sf = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([( + "product".into(), + Arc::new(mock_products_service) as Arc, + )])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); let result = query_plan .execute( &Context::new(), &sf, &Default::default(), - &Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()), + &schema, &Default::default(), sender, None, @@ -167,20 +177,27 @@ async fn fetch_includes_operation_name() { let (sender, _) = 
tokio::sync::mpsc::channel(10); - let sf = Arc::new(SubgraphServiceFactory { - services: Arc::new(HashMap::from([( - "product".into(), - Arc::new(mock_products_service) as Arc, - )])), - plugins: Default::default(), - }); + let schema = Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()); + let sf = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([( + "product".into(), + Arc::new(mock_products_service) as Arc, + )])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); let _response = query_plan .execute( &Context::new(), &sf, &Default::default(), - &Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()), + &schema, &Default::default(), sender, None, @@ -228,20 +245,27 @@ async fn fetch_makes_post_requests() { let (sender, _) = tokio::sync::mpsc::channel(10); - let sf = Arc::new(SubgraphServiceFactory { - services: Arc::new(HashMap::from([( - "product".into(), - Arc::new(mock_products_service) as Arc, - )])), - plugins: Default::default(), - }); + let schema = Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()); + let sf = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([( + "product".into(), + Arc::new(mock_products_service) as Arc, + )])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); let _response = query_plan .execute( &Context::new(), &sf, &Default::default(), - &Arc::new(Schema::parse(test_schema!(), &Default::default()).unwrap()), + &schema, &Default::default(), sender, None, @@ -377,19 +401,25 @@ async fn defer() { let schema = include_str!("testdata/defer_schema.graphql"); let schema = Arc::new(Schema::parse(schema, &Default::default()).unwrap()); - let sf = Arc::new(SubgraphServiceFactory { 
- services: Arc::new(HashMap::from([ - ( - "X".into(), - Arc::new(mock_x_service) as Arc, - ), - ( - "Y".into(), - Arc::new(mock_y_service) as Arc, - ), - ])), - plugins: Default::default(), - }); + let sf = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([ + ( + "X".into(), + Arc::new(mock_x_service) as Arc, + ), + ( + "Y".into(), + Arc::new(mock_y_service) as Arc, + ), + ])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); let response = query_plan .execute( @@ -486,13 +516,20 @@ async fn defer_if_condition() { let (sender, receiver) = tokio::sync::mpsc::channel(10); let mut receiver_stream = ReceiverStream::new(receiver); - let service_factory = Arc::new(SubgraphServiceFactory { - services: Arc::new(HashMap::from([( - "accounts".into(), - Arc::new(mocked_accounts) as Arc, - )])), - plugins: Default::default(), - }); + let service_factory = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([( + "accounts".into(), + Arc::new(mocked_accounts) as Arc, + )])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); + let defer_primary_response = query_plan .execute( &Context::new(), @@ -638,19 +675,26 @@ async fn dependent_mutations() { let mut mock_b_service = plugin::test::MockSubgraphService::new(); mock_b_service.expect_call().never(); - let sf = Arc::new(SubgraphServiceFactory { - services: Arc::new(HashMap::from([ - ( - "A".into(), - Arc::new(mock_a_service) as Arc, - ), - ( - "B".into(), - Arc::new(mock_b_service) as Arc, - ), - ])), - plugins: Default::default(), - }); + let schema = Arc::new(Schema::parse(schema, &Default::default()).unwrap()); + let sf = Arc::new(FetchServiceFactory::new( + schema.clone(), + Default::default(), + 
Arc::new(SubgraphServiceFactory { + services: Arc::new(HashMap::from([ + ( + "A".into(), + Arc::new(mock_a_service) as Arc, + ), + ( + "B".into(), + Arc::new(mock_b_service) as Arc, + ), + ])), + plugins: Default::default(), + }), + None, + Arc::new(ConnectorServiceFactory::empty(schema.clone())), + )); let (sender, _) = tokio::sync::mpsc::channel(10); let _response = query_plan @@ -658,7 +702,7 @@ async fn dependent_mutations() { &Context::new(), &sf, &Default::default(), - &Arc::new(Schema::parse(schema, &Default::default()).unwrap()), + &schema, &Default::default(), sender, None, @@ -676,56 +720,56 @@ async fn alias_renaming() { { query: Query } - + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE - + directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION - + directive @join__graph(name: String!, url: String!) on ENUM_VALUE - + directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE - + directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR - + directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION - + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA - + interface I @join__type(graph: S1) @join__type(graph: S2) { id: String! } - + scalar join__FieldSet - + enum join__Graph { S1 @join__graph(name: "S1", url: "http://localhost/s1") S2 @join__graph(name: "S2", url: "http://localhost/s2") } - + scalar link__Import - + enum link__Purpose { """ `SECURITY` features provide metadata necessary to securely resolve fields. 
""" SECURITY - + """ `EXECUTION` features provide metadata necessary for operation execution. """ EXECUTION } - + type Query @join__type(graph: S1) @join__type(graph: S2) { testQuery(id: String!): I @join__field(graph: S1) } - + type T1 implements I @join__implements(graph: S1, interface: "I") @join__implements(graph: S2, interface: "I") @@ -735,7 +779,7 @@ async fn alias_renaming() { id: String! foo: Test @join__field(graph: S2) } - + type T2 implements I @join__implements(graph: S1, interface: "I") @join__implements(graph: S2, interface: "I") @@ -745,7 +789,7 @@ async fn alias_renaming() { id: String! bar: Test @join__field(graph: S2) } - + type Test @join__type(graph: S2) { @@ -876,56 +920,56 @@ async fn missing_fields_in_requires() { { query: Query } - + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE - + directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION - + directive @join__graph(name: String!, url: String!) on ENUM_VALUE - + directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE - + directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR - + directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION - + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA - + type Details @join__type(graph: SUB1) @join__type(graph: SUB2) { enabled: Boolean } - + scalar join__FieldSet - + enum join__Graph { SUB1 @join__graph(name: "sub1", url: "http://localhost:4002/test") SUB2 @join__graph(name: "sub2", url: "http://localhost:4002/test2") } - + scalar link__Import - + enum link__Purpose { """ `SECURITY` features provide metadata necessary to securely resolve fields. """ SECURITY - + """ `EXECUTION` features provide metadata necessary for operation execution. """ EXECUTION } - + type Query @join__type(graph: SUB1) @join__type(graph: SUB2) { stuff: Stuff @join__field(graph: SUB1) } - + type Stuff @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id", extension: true) @@ -1019,49 +1063,49 @@ async fn missing_typename_and_fragments_in_requires() { { query: Query } - + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE - + directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION - + directive @join__graph(name: String!, url: String!) on ENUM_VALUE - + directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE - + directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR - + directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION - + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA - + scalar join__FieldSet - + enum join__Graph { SUB1 @join__graph(name: "sub1", url: "http://localhost:4002/test") SUB2 @join__graph(name: "sub2", url: "http://localhost:4002/test2") } - + scalar link__Import - + enum link__Purpose { """ `SECURITY` features provide metadata necessary to securely resolve fields. """ SECURITY - + """ `EXECUTION` features provide metadata necessary for operation execution. """ EXECUTION } - + type Query @join__type(graph: SUB1) @join__type(graph: SUB2) { stuff: Stuff @join__field(graph: SUB1) } - + type Stuff @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id", extension: true) @@ -1070,7 +1114,7 @@ async fn missing_typename_and_fragments_in_requires() { thing: Thing isEnabled: Boolean @join__field(graph: SUB2, requires: "thing { ... on Thing { text } }") } - + type Thing @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id") { @@ -1155,58 +1199,58 @@ async fn missing_typename_and_fragments_in_requires2() { { query: Query } - + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE - + directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION - + directive @join__graph(name: String!, url: String!) on ENUM_VALUE - + directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE - + directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR - + directive @join__unionMember(graph: join__Graph!, member: String!) 
repeatable on UNION - + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA - + scalar join__FieldSet - + enum join__Graph { SUB1 @join__graph(name: "sub1", url: "http://localhost:4002/test") SUB2 @join__graph(name: "sub2", url: "http://localhost:4002/test2") } - + scalar link__Import - + enum link__Purpose { """ `SECURITY` features provide metadata necessary to securely resolve fields. """ SECURITY - + """ `EXECUTION` features provide metadata necessary for operation execution. """ EXECUTION } - + type Query @join__type(graph: SUB1) @join__type(graph: SUB2) { stuff: Stuff @join__field(graph: SUB1) } - + type Stuff @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id", extension: true) { id: ID - thing: PossibleThing @join__field(graph: SUB1) @join__field(graph: SUB2, external: true) + thing: PossibleThing @join__field(graph: SUB1) @join__field(graph: SUB2, external: true) isEnabled: Boolean @join__field(graph: SUB2, requires: "thing { ... on Thing1 { __typename text1 } ... on Thing2 { __typename text2 } }") } - + union PossibleThing @join__type(graph: SUB1) @join__type(graph: SUB2) @join__unionMember(graph: SUB1, member: "Thing1") @join__unionMember(graph: SUB1, member: "Thing2") @join__unionMember(graph: SUB2, member: "Thing1") @join__unionMember(graph: SUB2, member: "Thing2") @@ -1309,49 +1353,49 @@ async fn null_in_requires() { { query: Query } - + directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE - + directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION - + directive @join__graph(name: String!, url: String!) on ENUM_VALUE - + directive @join__implements(graph: join__Graph!, interface: String!) 
repeatable on OBJECT | INTERFACE - + directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR - + directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION - + directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA - + scalar join__FieldSet - + enum join__Graph { SUB1 @join__graph(name: "sub1", url: "http://localhost:4002/test") SUB2 @join__graph(name: "sub2", url: "http://localhost:4002/test2") } - + scalar link__Import - + enum link__Purpose { """ `SECURITY` features provide metadata necessary to securely resolve fields. """ SECURITY - + """ `EXECUTION` features provide metadata necessary for operation execution. """ EXECUTION } - + type Query @join__type(graph: SUB1) @join__type(graph: SUB2) { stuff: Stuff @join__field(graph: SUB1) } - + type Stuff @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id", extension: true) @@ -1360,7 +1404,7 @@ async fn null_in_requires() { thing: Thing isEnabled: Boolean @join__field(graph: SUB2, requires: "thing { a text }") } - + type Thing @join__type(graph: SUB1, key: "id") @join__type(graph: SUB2, key: "id") { diff --git a/apollo-router/src/router_factory.rs b/apollo-router/src/router_factory.rs index 49b792e083..b3cfebdba7 100644 --- a/apollo-router/src/router_factory.rs +++ b/apollo-router/src/router_factory.rs @@ -7,12 +7,12 @@ use axum::response::IntoResponse; use http::StatusCode; use indexmap::IndexMap; use multimap::MultiMap; +use rustls::pki_types::CertificateDer; use rustls::RootCertStore; use serde_json::Map; use serde_json::Value; use tower::service_fn; use tower::BoxError; -use tower::ServiceBuilder; use tower::ServiceExt; use tower_service::Service; use tracing::Instrument; @@ -40,7 +40,6 @@ use crate::services::new_service::ServiceFactory; use 
crate::services::router; use crate::services::router::service::RouterCreator; use crate::services::subgraph; -use crate::services::transport; use crate::services::HasConfig; use crate::services::HasSchema; use crate::services::PluggableSupergraphServiceBuilder; @@ -70,21 +69,6 @@ impl std::fmt::Debug for Endpoint { } impl Endpoint { - /// Creates an Endpoint given a path and a Boxed Service - #[deprecated = "use `from_router_service` instead"] - #[allow(deprecated)] - pub fn new(path: String, handler: transport::BoxService) -> Self { - let router_service = ServiceBuilder::new() - .map_request(|request: router::Request| request.router_request) - .map_response(|response: transport::Response| response.into()) - .service(handler) - .boxed(); - Self { - path, - handler: Handler::new(router_service), - } - } - /// Creates an Endpoint given a path and a Boxed Service pub fn from_router_service(path: String, handler: router::BoxService) -> Self { Self { @@ -92,8 +76,9 @@ impl Endpoint { handler: Handler::new(handler), } } + pub(crate) fn into_router(self) -> axum::Router { - let handler = move |req: http::Request| { + let handler = move |req: http::Request| { let endpoint = self.handler.clone(); async move { Ok(endpoint @@ -351,8 +336,11 @@ impl YamlRouterFactory { async { let mut builder = PluggableSupergraphServiceBuilder::new(planner); builder = builder.with_configuration(configuration.clone()); + let http_service_factory = + create_http_services(&plugins, &schema, &configuration).await?; let subgraph_services = - create_subgraph_services(&plugins, &schema, &configuration).await?; + create_subgraph_services(&http_service_factory, &plugins, &configuration).await?; + builder = builder.with_http_service_factory(http_service_factory); for (name, subgraph_service) in subgraph_services { builder = builder.with_subgraph_service(&name, subgraph_service); } @@ -368,8 +356,8 @@ impl YamlRouterFactory { } pub(crate) async fn create_subgraph_services( + http_service_factory: 
&IndexMap, plugins: &Arc, - schema: &Schema, configuration: &Configuration, ) -> Result< IndexMap< @@ -388,6 +376,40 @@ pub(crate) async fn create_subgraph_services( >, BoxError, > { + let subscription_plugin_conf = plugins + .iter() + .find(|i| i.0.as_str() == APOLLO_SUBSCRIPTION_PLUGIN) + .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) + .map(|p| p.config.clone()); + + let shaping = plugins + .iter() + .find(|i| i.0.as_str() == APOLLO_TRAFFIC_SHAPING) + .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) + .expect("traffic shaping should always be part of the plugin list"); + + let mut subgraph_services = IndexMap::default(); + for (name, http_service_factory) in http_service_factory.iter() { + let subgraph_service = shaping.subgraph_service_internal( + name.as_ref(), + SubgraphService::from_config( + name.clone(), + configuration, + subscription_plugin_conf.clone(), + http_service_factory.clone(), + )?, + ); + subgraph_services.insert(name.clone(), subgraph_service); + } + + Ok(subgraph_services) +} + +pub(crate) async fn create_http_services( + plugins: &Arc, + schema: &Schema, + configuration: &Configuration, +) -> Result, BoxError> { let tls_root_store: RootCertStore = configuration .tls .subgraph @@ -396,19 +418,13 @@ pub(crate) async fn create_subgraph_services( .transpose()? 
.unwrap_or_else(crate::services::http::HttpClientService::native_roots_store); - let subscription_plugin_conf = plugins - .iter() - .find(|i| i.0.as_str() == APOLLO_SUBSCRIPTION_PLUGIN) - .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) - .map(|p| p.config.clone()); - let shaping = plugins .iter() .find(|i| i.0.as_str() == APOLLO_TRAFFIC_SHAPING) .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) .expect("traffic shaping should always be part of the plugin list"); - let mut subgraph_services = IndexMap::default(); + let mut http_services = IndexMap::new(); for (name, _) in schema.subgraphs() { let http_service = crate::services::http::HttpClientService::from_config( name, @@ -418,20 +434,9 @@ pub(crate) async fn create_subgraph_services( )?; let http_service_factory = HttpClientServiceFactory::new(http_service, plugins.clone()); - - let subgraph_service = shaping.subgraph_service_internal( - name, - SubgraphService::from_config( - name, - configuration, - subscription_plugin_conf.clone(), - http_service_factory, - )?, - ); - subgraph_services.insert(name.clone(), subgraph_service); + http_services.insert(name.clone(), http_service_factory); } - - Ok(subgraph_services) + Ok(http_services) } impl TlsClient { @@ -455,7 +460,7 @@ pub(crate) fn create_certificate_store( })?; for certificate in certificates { store - .add(&certificate) + .add(certificate) .map_err(|e| ConfigurationError::CertificateAuthorities { error: format!("could not add certificate to root store: {e}"), })?; @@ -469,17 +474,20 @@ pub(crate) fn create_certificate_store( } } -fn load_certs(certificates: &str) -> io::Result> { +fn load_certs(certificates: &str) -> io::Result>> { tracing::debug!("loading root certificates"); // Load and return certificate. 
- let certs = rustls_pemfile::certs(&mut certificates.as_bytes()).map_err(|_| { - io::Error::new( - io::ErrorKind::Other, - "failed to load certificate".to_string(), - ) - })?; - Ok(certs.into_iter().map(rustls::Certificate).collect()) + rustls_pemfile::certs(&mut certificates.as_bytes()) + .collect::, _>>() + // XXX(@goto-bus-stop): the error type here is already io::Error. Should we wrap it, + // instead of replacing it with this generic error message? + .map_err(|_| { + io::Error::new( + io::ErrorKind::Other, + "failed to load certificate".to_string(), + ) + }) } /// test only helper method to create a router factory in integration tests @@ -687,6 +695,7 @@ pub(crate) async fn create_plugins( add_optional_apollo_plugin!("demand_control"); // This relative ordering is documented in `docs/source/customizations/native.mdx`: + add_optional_apollo_plugin!("preview_connectors"); add_optional_apollo_plugin!("rhai"); add_optional_apollo_plugin!("coprocessor"); add_user_plugins!(); diff --git a/apollo-router/src/services/connect.rs b/apollo-router/src/services/connect.rs new file mode 100644 index 0000000000..40b1f990d1 --- /dev/null +++ b/apollo-router/src/services/connect.rs @@ -0,0 +1,54 @@ +//! Connect service request and response types. 
+ +use std::sync::Arc; + +use apollo_compiler::validation::Valid; +use apollo_compiler::ExecutableDocument; +use static_assertions::assert_impl_all; +use tower::BoxError; + +use crate::graphql; +use crate::graphql::Request as GraphQLRequest; +use crate::query_planner::fetch::Variables; +use crate::Context; + +pub(crate) type BoxService = tower::util::BoxService; + +#[non_exhaustive] +pub(crate) struct Request { + pub(crate) service_name: Arc, + pub(crate) context: Context, + pub(crate) operation: Arc>, + pub(crate) supergraph_request: Arc>, + pub(crate) variables: Variables, +} + +assert_impl_all!(Response: Send); +#[derive(Debug)] +#[non_exhaustive] +pub(crate) struct Response { + pub(crate) response: http::Response, +} + +#[buildstructor::buildstructor] +impl Request { + /// This is the constructor (or builder) to use when constructing a real Request. + /// + /// Required parameters are required in non-testing code to create a Request. + #[builder(visibility = "pub")] + fn new( + service_name: Arc, + context: Context, + operation: Arc>, + supergraph_request: Arc>, + variables: Variables, + ) -> Self { + Self { + service_name, + context, + operation, + supergraph_request, + variables, + } + } +} diff --git a/apollo-router/src/services/connector_service.rs b/apollo-router/src/services/connector_service.rs new file mode 100644 index 0000000000..4ad2e19d33 --- /dev/null +++ b/apollo-router/src/services/connector_service.rs @@ -0,0 +1,367 @@ +//! Tower service for connectors. 
+ +use std::str::FromStr; +use std::sync::Arc; +use std::task::Poll; + +use apollo_federation::sources::connect::Connector; +use futures::future::BoxFuture; +use indexmap::IndexMap; +use opentelemetry::Key; +use opentelemetry_api::metrics::ObservableGauge; +use parking_lot::Mutex; +use serde::Deserialize; +use serde::Serialize; +use tower::BoxError; +use tower::ServiceExt; +use tracing::error; +use tracing::Instrument; + +use super::connect::BoxService; +use super::http::HttpClientServiceFactory; +use super::http::HttpRequest; +use super::new_service::ServiceFactory; +use crate::error::FetchError; +use crate::plugins::connectors::error::Error as ConnectorError; +use crate::plugins::connectors::handle_responses::aggregate_responses; +use crate::plugins::connectors::handle_responses::process_response; +use crate::plugins::connectors::http::Request; +use crate::plugins::connectors::http::Response as ConnectorResponse; +use crate::plugins::connectors::http::Result as ConnectorResult; +use crate::plugins::connectors::make_requests::make_requests; +use crate::plugins::connectors::plugin::debug::ConnectorContext; +use crate::plugins::connectors::request_limit::RequestLimits; +use crate::plugins::connectors::tracing::connect_spec_version_instrument; +use crate::plugins::connectors::tracing::CONNECTOR_TYPE_HTTP; +use crate::plugins::subscription::SubscriptionConfig; +use crate::plugins::telemetry::consts::CONNECT_SPAN_NAME; +use crate::query_planner::fetch::SubgraphSchemas; +use crate::services::router::body::RouterBody; +use crate::services::ConnectRequest; +use crate::services::ConnectResponse; +use crate::spec::Schema; + +pub(crate) const APOLLO_CONNECTOR_TYPE: Key = Key::from_static_str("apollo.connector.type"); +pub(crate) const APOLLO_CONNECTOR_DETAIL: Key = Key::from_static_str("apollo.connector.detail"); +pub(crate) const APOLLO_CONNECTOR_SELECTION: Key = + Key::from_static_str("apollo.connector.selection"); +pub(crate) const APOLLO_CONNECTOR_FIELD_NAME: Key = + 
Key::from_static_str("apollo.connector.field.name"); +pub(crate) const APOLLO_CONNECTOR_FIELD_ALIAS: Key = + Key::from_static_str("apollo.connector.field.alias"); +pub(crate) const APOLLO_CONNECTOR_FIELD_RETURN_TYPE: Key = + Key::from_static_str("apollo.connector.field.return_type"); +pub(crate) const APOLLO_CONNECTOR_SOURCE_NAME: Key = + Key::from_static_str("apollo.connector.source.name"); +pub(crate) const APOLLO_CONNECTOR_SOURCE_DETAIL: Key = + Key::from_static_str("apollo.connector.source.detail"); +pub(crate) const CONNECTOR_INFO_CONTEXT_KEY: &str = "apollo_router::connector::info"; + +/// A service for executing connector requests. +#[derive(Clone)] +pub(crate) struct ConnectorService { + pub(crate) http_service_factory: Arc>, + pub(crate) _schema: Arc, + pub(crate) _subgraph_schemas: Arc, + pub(crate) _subscription_config: Option, + pub(crate) connectors_by_service_name: Arc, Connector>>, +} + +/// Serializable information about a connector. +#[derive(Clone, Debug, Serialize, Deserialize)] +pub(crate) struct ConnectorInfo { + pub(crate) subgraph_name: String, + pub(crate) source_name: Option, + pub(crate) http_method: String, + pub(crate) url_template: String, +} + +impl From<&Connector> for ConnectorInfo { + fn from(connector: &Connector) -> Self { + Self { + subgraph_name: connector.id.subgraph_name.to_string(), + source_name: connector.id.source_name.clone(), + http_method: connector.transport.method.as_str().to_string(), + url_template: connector.transport.connect_template.to_string(), + } + } +} + +/// A reference to a unique Connector source. 
+#[derive(Hash, Eq, PartialEq, Clone, Serialize, Deserialize)] +pub(crate) struct ConnectorSourceRef { + pub(crate) subgraph_name: String, + pub(crate) source_name: String, +} + +impl ConnectorSourceRef { + pub(crate) fn new(subgraph_name: String, source_name: String) -> Self { + Self { + subgraph_name, + source_name, + } + } +} + +impl FromStr for ConnectorSourceRef { + type Err = String; + + fn from_str(s: &str) -> Result { + let mut parts = s.split('.'); + let subgraph_name = parts + .next() + .ok_or(format!("Invalid connector source reference '{}'", s))? + .to_string(); + let source_name = parts + .next() + .ok_or(format!("Invalid connector source reference '{}'", s))? + .to_string(); + Ok(Self::new(subgraph_name, source_name)) + } +} + +impl TryFrom<&Connector> for ConnectorSourceRef { + type Error = (); + + fn try_from(value: &Connector) -> Result { + Ok(Self { + subgraph_name: value.id.subgraph_name.to_string(), + source_name: value.id.source_name.clone().ok_or(())?, + }) + } +} + +impl tower::Service for ConnectorService { + type Response = ConnectResponse; + type Error = BoxError; + type Future = BoxFuture<'static, Result>; + + fn poll_ready(&mut self, _cx: &mut std::task::Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, request: ConnectRequest) -> Self::Future { + let connector = self + .connectors_by_service_name + .get(&request.service_name) + .cloned(); + + let http_client_factory = self + .http_service_factory + .get(&request.service_name.to_string()) + .cloned(); + + Box::pin(async move { + let Some(connector) = connector else { + return Err("no connector found".into()); + }; + + let Some(http_client_factory) = http_client_factory else { + return Err("no http client found".into()); + }; + let fetch_time_offset = request.context.created_at.elapsed().as_nanos() as i64; + let span = tracing::info_span!( + CONNECT_SPAN_NAME, + "otel.kind" = "INTERNAL", + "apollo.connector.type" = CONNECTOR_TYPE_HTTP, + "apollo.connector.detail" = 
tracing::field::Empty, + "apollo.connector.field.name" = %connector.field_name(), + "apollo.connector.selection" = %connector.selection, + "apollo.connector.source.name" = tracing::field::Empty, + "apollo.connector.source.detail" = tracing::field::Empty, + "apollo_private.sent_time_offset" = fetch_time_offset, + "otel.status_code" = tracing::field::Empty, + ); + // TODO: I think we should get rid of these attributes by default and only add it from custom telemetry. We just need to double check it's not required for Studio. + + // These additional attributes will be added to custom telemetry feature + // TODO: apollo.connector.field.alias + // TODO: apollo.connector.field.return_type + // TODO: apollo.connector.field.selection_set + let transport = &connector.transport; + if let Ok(detail) = serde_json::to_string( + &serde_json::json!({ transport.method.as_str(): transport.connect_template.to_string() }), + ) { + span.record("apollo.connector.detail", detail); + } + if let Some(source_name) = connector.id.source_name.as_ref() { + span.record("apollo.connector.source.name", source_name); + if let Ok(detail) = + serde_json::to_string(&serde_json::json!({ "baseURL": transport.source_url })) + { + span.record("apollo.connector.source.detail", detail); + } + } + + execute(&http_client_factory, request, &connector) + .instrument(span) + .await + }) + } +} + +async fn execute( + http_client_factory: &HttpClientServiceFactory, + request: ConnectRequest, + connector: &Connector, +) -> Result { + let context = request.context.clone(); + let original_subgraph_name = connector.id.subgraph_name.to_string(); + + let (ref debug, request_limit) = context.extensions().with_lock(|lock| { + let debug = lock.get::>>().cloned(); + let request_limit = lock + .get::>() + .map(|limits| limits.get((&connector.id).into(), connector.max_requests)) + .unwrap_or(None); + (debug, request_limit) + }); + + let requests = make_requests(request, connector, debug).map_err(BoxError::from)?; + + let 
tasks = requests.into_iter().map( + move |Request { + request: req, + key, + debug_request, + }| { + // Returning an error from this closure causes all tasks to be cancelled and the operation + // to fail. This is the reason for the Result-wrapped-in-a-Result here. An `Err` on the + // inner result fails just that one task, but an `Err` on the outer result cancels all the + // tasks and fails the whole operation. + let context = context.clone(); + if context + .insert(CONNECTOR_INFO_CONTEXT_KEY, ConnectorInfo::from(connector)) + .is_err() + { + error!("Failed to store connector info in context"); + } + let original_subgraph_name = original_subgraph_name.clone(); + let request_limit = request_limit.clone(); + async move { + let res = if request_limit.is_some_and(|request_limit| !request_limit.allow()) { + ConnectorResponse { + result: ConnectorResult::::Err( + ConnectorError::RequestLimitExceeded, + ), + key, + debug_request, + } + } else { + let client = http_client_factory.create(&original_subgraph_name); + let req = HttpRequest { + http_request: req, + context: context.clone(), + }; + let res = match client.oneshot(req).await { + Ok(res) => ConnectorResponse { + result: ConnectorResult::HttpResponse(res.http_response), + key, + debug_request, + }, + Err(e) => ConnectorResponse { + result: ConnectorResult::::Err( + ConnectorError::HTTPClientError(handle_subrequest_http_error( + e, connector, + )), + ), + key, + debug_request, + }, + }; + + u64_counter!( + "apollo.router.operations.connectors", + "Total number of requests to connectors", + 1, + "connector.type" = CONNECTOR_TYPE_HTTP, + "subgraph.name" = original_subgraph_name + ); + + res + }; + + Ok::<_, BoxError>(process_response(res, connector, &context, debug).await) + } + }, + ); + + aggregate_responses(futures::future::try_join_all(tasks).await?).map_err(BoxError::from) +} + +fn handle_subrequest_http_error(err: BoxError, connector: &Connector) -> BoxError { + match err.downcast::() { + // Replace the 
internal subgraph name with the connector label + Ok(inner) => match *inner { + FetchError::SubrequestHttpError { + status_code, + service: _, + reason, + } => Box::new(FetchError::SubrequestHttpError { + status_code, + service: connector.id.subgraph_source(), + reason, + }), + _ => inner, + }, + Err(e) => e, + } +} + +#[derive(Clone)] +pub(crate) struct ConnectorServiceFactory { + pub(crate) schema: Arc, + pub(crate) subgraph_schemas: Arc, + pub(crate) http_service_factory: Arc>, + pub(crate) subscription_config: Option, + pub(crate) connectors_by_service_name: Arc, Connector>>, + _connect_spec_version_instrument: Option>, +} + +impl ConnectorServiceFactory { + pub(crate) fn new( + schema: Arc, + subgraph_schemas: Arc, + http_service_factory: Arc>, + subscription_config: Option, + connectors_by_service_name: Arc, Connector>>, + ) -> Self { + Self { + http_service_factory, + subgraph_schemas, + schema: schema.clone(), + subscription_config, + connectors_by_service_name, + _connect_spec_version_instrument: connect_spec_version_instrument( + schema.connectors.as_ref(), + ), + } + } + + #[cfg(test)] + pub(crate) fn empty(schema: Arc) -> Self { + Self { + http_service_factory: Arc::new(Default::default()), + subgraph_schemas: Default::default(), + subscription_config: Default::default(), + connectors_by_service_name: Default::default(), + schema, + _connect_spec_version_instrument: None, + } + } +} + +impl ServiceFactory for ConnectorServiceFactory { + type Service = BoxService; + + fn create(&self) -> Self::Service { + ConnectorService { + http_service_factory: self.http_service_factory.clone(), + _schema: self.schema.clone(), + _subgraph_schemas: self.subgraph_schemas.clone(), + _subscription_config: self.subscription_config.clone(), + connectors_by_service_name: self.connectors_by_service_name.clone(), + } + .boxed() + } +} diff --git a/apollo-router/src/services/execution/service.rs b/apollo-router/src/services/execution/service.rs index e793178c62..b45ad5519f 
100644 --- a/apollo-router/src/services/execution/service.rs +++ b/apollo-router/src/services/execution/service.rs @@ -48,11 +48,11 @@ use crate::plugins::telemetry::Telemetry; use crate::query_planner::fetch::SubgraphSchemas; use crate::query_planner::subscription::SubscriptionHandle; use crate::services::execution; +use crate::services::fetch_service::FetchServiceFactory; use crate::services::new_service::ServiceFactory; use crate::services::ExecutionRequest; use crate::services::ExecutionResponse; use crate::services::Plugins; -use crate::services::SubgraphServiceFactory; use crate::spec::query::subselections::BooleanValues; use crate::spec::Query; use crate::spec::Schema; @@ -62,7 +62,7 @@ use crate::spec::Schema; pub(crate) struct ExecutionService { pub(crate) schema: Arc, pub(crate) subgraph_schemas: Arc, - pub(crate) subgraph_service_factory: Arc, + pub(crate) fetch_service_factory: Arc, /// Subscription config if enabled subscription_config: Option, apollo_telemetry_config: Option, @@ -148,7 +148,7 @@ impl ExecutionService { .query_plan .execute( &context, - &self.subgraph_service_factory, + &self.fetch_service_factory, &Arc::new(req.supergraph_request), &self.schema, &self.subgraph_schemas, @@ -633,7 +633,7 @@ pub(crate) struct ExecutionServiceFactory { pub(crate) schema: Arc, pub(crate) subgraph_schemas: Arc, pub(crate) plugins: Arc, - pub(crate) subgraph_service_factory: Arc, + pub(crate) fetch_service_factory: Arc, } impl ServiceFactory for ExecutionServiceFactory { @@ -658,7 +658,7 @@ impl ServiceFactory for ExecutionServiceFactory { self.plugins.iter().rev().fold( crate::services::execution::service::ExecutionService { schema: self.schema.clone(), - subgraph_service_factory: self.subgraph_service_factory.clone(), + fetch_service_factory: self.fetch_service_factory.clone(), subscription_config: subscription_plugin_conf, subgraph_schemas: self.subgraph_schemas.clone(), apollo_telemetry_config: apollo_telemetry_conf, diff --git 
a/apollo-router/src/services/external.rs b/apollo-router/src/services/external.rs index cec76f3413..2700e74c82 100644 --- a/apollo-router/src/services/external.rs +++ b/apollo-router/src/services/external.rs @@ -25,7 +25,7 @@ use super::subgraph::SubgraphRequestId; use crate::plugins::telemetry::otel::OpenTelemetrySpanExt; use crate::plugins::telemetry::reload::prepare_context; use crate::query_planner::QueryPlan; -use crate::services::router::body::get_body_bytes; +use crate::services::router; use crate::services::router::body::RouterBody; use crate::Context; @@ -297,17 +297,17 @@ where .method(Method::POST) .header(ACCEPT, "application/json") .header(CONTENT_TYPE, "application/json") - .body(serde_json::to_vec(&self)?.into())?; + .body(router::body::from_bytes(serde_json::to_vec(&self)?))?; get_text_map_propagator(|propagator| { propagator.inject_context( &prepare_context(tracing::span::Span::current().context()), - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut crate::otel_compat::HeaderInjector(request.headers_mut()), ); }); - let response = client.call(request).await?; - get_body_bytes(response.into_body()) + let response = client.call(request).await.map_err(BoxError::from)?; + router::body::into_bytes(response.into_body()) .await .map_err(BoxError::from) .and_then(|bytes| serde_json::from_slice(&bytes).map_err(BoxError::from)) diff --git a/apollo-router/src/services/fetch.rs b/apollo-router/src/services/fetch.rs new file mode 100644 index 0000000000..41cdc70015 --- /dev/null +++ b/apollo-router/src/services/fetch.rs @@ -0,0 +1,172 @@ +//! Fetch request and response types. 
+ +use std::sync::Arc; + +use serde_json_bytes::json; +use serde_json_bytes::Value; +use tokio::sync::mpsc; +use tower::BoxError; + +use crate::error::Error; +use crate::error::FetchError; +use crate::graphql::Request as GraphQLRequest; +use crate::json_ext::Path; +use crate::plugins::subscription::SubscriptionConfig; +use crate::query_planner::fetch::FetchNode; +use crate::query_planner::fetch::Variables; +use crate::query_planner::subscription::SubscriptionHandle; +use crate::query_planner::subscription::SubscriptionNode; +use crate::Context; + +const FETCH_SUBGRAPH_NAME_EXTENSION_KEY: &str = "fetch_subgraph_name"; + +pub(crate) type BoxService = tower::util::BoxService; + +pub(crate) enum Request { + Fetch(FetchRequest), + Subscription(SubscriptionRequest), +} + +pub(crate) type Response = (Value, Vec); + +#[non_exhaustive] +pub(crate) struct FetchRequest { + pub(crate) context: Context, + pub(crate) fetch_node: FetchNode, + pub(crate) supergraph_request: Arc>, + pub(crate) variables: Variables, + pub(crate) current_dir: Path, +} + +#[buildstructor::buildstructor] +impl FetchRequest { + /// This is the constructor (or builder) to use when constructing a fetch Request. + /// + /// Required parameters are required in non-testing code to create a Request. 
+ #[builder(visibility = "pub")] + fn new( + context: Context, + fetch_node: FetchNode, + supergraph_request: Arc>, + variables: Variables, + current_dir: Path, + ) -> Self { + Self { + context, + fetch_node, + supergraph_request, + variables, + current_dir, + } + } +} + +pub(crate) struct SubscriptionRequest { + pub(crate) context: Context, + pub(crate) subscription_node: SubscriptionNode, + pub(crate) supergraph_request: Arc>, + pub(crate) variables: Variables, + pub(crate) current_dir: Path, + pub(crate) sender: mpsc::Sender, + pub(crate) subscription_handle: Option, + pub(crate) subscription_config: Option, +} + +#[buildstructor::buildstructor] +impl SubscriptionRequest { + /// This is the constructor (or builder) to use when constructing a subscription Request. + /// + /// Required parameters are required in non-testing code to create a Request. + #[allow(clippy::too_many_arguments)] + #[builder(visibility = "pub")] + fn new( + context: Context, + subscription_node: SubscriptionNode, + supergraph_request: Arc>, + variables: Variables, + current_dir: Path, + sender: mpsc::Sender, + subscription_handle: Option, + subscription_config: Option, + ) -> Self { + Self { + context, + subscription_node, + supergraph_request, + variables, + current_dir, + sender, + subscription_handle, + subscription_config, + } + } +} + +/// Map a fetch error result to a [GraphQL error](GraphQLError). +pub(crate) trait ErrorMapping { + #[allow(clippy::result_large_err)] + fn map_to_graphql_error(self, service_name: String, current_dir: &Path) -> Result; +} + +impl ErrorMapping for Result { + fn map_to_graphql_error(self, service_name: String, current_dir: &Path) -> Result { + // TODO this is a problem since it restores details about failed service + // when errors have been redacted in the include_subgraph_errors module. + // Unfortunately, not easy to fix here, because at this point we don't + // know if we should be redacting errors for this subgraph... 
+ self.map_err(|e| match e.downcast::() { + Ok(inner) => match *inner { + FetchError::SubrequestHttpError { .. } => *inner, + _ => FetchError::SubrequestHttpError { + status_code: None, + service: service_name, + reason: inner.to_string(), + }, + }, + Err(e) => FetchError::SubrequestHttpError { + status_code: None, + service: service_name, + reason: e.to_string(), + }, + }) + .map_err(|e| e.to_graphql_error(Some(current_dir.to_owned()))) + } +} + +/// Extension trait for adding a subgraph name associated with an error. +pub(crate) trait AddSubgraphNameExt { + /// Add the subgraph name associated with an error + fn add_subgraph_name(self, subgraph_name: &str) -> Self; +} + +impl AddSubgraphNameExt for Error { + fn add_subgraph_name(mut self, subgraph_name: &str) -> Self { + self.extensions + .insert(FETCH_SUBGRAPH_NAME_EXTENSION_KEY, json!(subgraph_name)); + self + } +} + +impl AddSubgraphNameExt for Result { + fn add_subgraph_name(self, subgraph_name: &str) -> Self { + self.map_err(|e| e.add_subgraph_name(subgraph_name)) + } +} + +/// Extension trait for getting the subgraph name associated with an error, if any. +pub(crate) trait SubgraphNameExt { + /// Get the subgraph name associated with an error, if any + fn subgraph_name(&mut self) -> Option; +} + +impl SubgraphNameExt for Error { + fn subgraph_name(&mut self) -> Option { + if let Some(subgraph_name) = self.extensions.remove(FETCH_SUBGRAPH_NAME_EXTENSION_KEY) { + subgraph_name + .as_str() + .map(|subgraph_name| subgraph_name.to_string()) + } else { + None + } + } +} diff --git a/apollo-router/src/services/fetch_service.rs b/apollo-router/src/services/fetch_service.rs new file mode 100644 index 0000000000..6a852cf22e --- /dev/null +++ b/apollo-router/src/services/fetch_service.rs @@ -0,0 +1,495 @@ +//! Tower fetcher for fetch node execution. 
+ +use std::sync::atomic::Ordering; +use std::sync::Arc; +use std::task::Poll; + +use futures::future::BoxFuture; +use serde_json_bytes::Value; +use tokio::sync::mpsc; +use tower::BoxError; +use tower::ServiceExt; +use tracing::instrument::Instrumented; +use tracing::Instrument; + +use super::connector_service::ConnectorServiceFactory; +use super::fetch::AddSubgraphNameExt; +use super::fetch::BoxService; +use super::fetch::SubscriptionRequest; +use super::new_service::ServiceFactory; +use super::ConnectRequest; +use super::FetchRequest; +use super::SubgraphRequest; +use super::SubscriptionTaskParams; +use crate::error::Error; +use crate::graphql::Request as GraphQLRequest; +use crate::http_ext; +use crate::plugins::subscription::SubscriptionConfig; +use crate::query_planner::build_operation_with_aliasing; +use crate::query_planner::fetch::FetchNode; +use crate::query_planner::fetch::SubgraphSchemas; +use crate::query_planner::subscription::SubscriptionNode; +use crate::query_planner::subscription::OPENED_SUBSCRIPTIONS; +use crate::query_planner::OperationKind; +use crate::query_planner::FETCH_SPAN_NAME; +use crate::query_planner::SUBSCRIBE_SPAN_NAME; +use crate::services::fetch::ErrorMapping; +use crate::services::fetch::Request; +use crate::services::subgraph::BoxGqlStream; +use crate::services::FetchResponse; +use crate::services::SubgraphServiceFactory; +use crate::spec::Schema; + +/// The fetch service delegates to either the subgraph service or connector service depending +/// on whether connectors are present in the subgraph. 
+#[derive(Clone)] +pub(crate) struct FetchService { + pub(crate) subgraph_service_factory: Arc, + pub(crate) schema: Arc, + pub(crate) subgraph_schemas: Arc, + pub(crate) _subscription_config: Option, // TODO: add subscription support to FetchService + pub(crate) connector_service_factory: Arc, +} + +impl tower::Service for FetchService { + type Response = FetchResponse; + type Error = BoxError; + type Future = Instrumented>>; + + fn poll_ready(&mut self, _cx: &mut std::task::Context<'_>) -> Poll> { + Poll::Ready(Ok(())) + } + + fn call(&mut self, request: Request) -> Self::Future { + match request { + Request::Fetch(request) => self.handle_fetch(request), + Request::Subscription(request) => self.handle_subscription(request), + } + } +} + +impl FetchService { + fn handle_fetch( + &mut self, + request: FetchRequest, + ) -> >::Future { + let FetchRequest { + ref context, + fetch_node: FetchNode { + ref service_name, .. + }, + .. + } = request; + let service_name = service_name.clone(); + let fetch_time_offset = context.created_at.elapsed().as_nanos() as i64; + + if let Some(connector) = self + .connector_service_factory + .connectors_by_service_name + .get(service_name.as_ref()) + { + Self::fetch_with_connector_service( + self.schema.clone(), + self.connector_service_factory.clone(), + connector.id.subgraph_name.clone(), + request, + ) + .instrument(tracing::info_span!( + FETCH_SPAN_NAME, + "otel.kind" = "INTERNAL", + "apollo.subgraph.name" = connector.id.subgraph_name, + "apollo_private.sent_time_offset" = fetch_time_offset + )) + } else { + Self::fetch_with_subgraph_service( + self.schema.clone(), + self.subgraph_service_factory.clone(), + self.subgraph_schemas.clone(), + request, + ) + .instrument(tracing::info_span!( + FETCH_SPAN_NAME, + "otel.kind" = "INTERNAL", + "apollo.subgraph.name" = service_name.as_ref(), + "apollo_private.sent_time_offset" = fetch_time_offset + )) + } + } + + fn fetch_with_connector_service( + schema: Arc, + connector_service_factory: 
Arc, + subgraph_name: String, + request: FetchRequest, + ) -> BoxFuture<'static, Result> { + let FetchRequest { + fetch_node, + supergraph_request, + variables, + context, + current_dir, + .. + } = request; + + let paths = variables.inverted_paths.clone(); + let operation = fetch_node.operation.as_parsed().cloned(); + + Box::pin(async move { + let (_parts, response) = match connector_service_factory + .create() + .oneshot( + ConnectRequest::builder() + .service_name(fetch_node.service_name.clone()) + .context(context) + .operation(operation?.clone()) + .supergraph_request(supergraph_request) + .variables(variables) + .build(), + ) + .await + .map_to_graphql_error(subgraph_name.clone(), ¤t_dir.to_owned()) + .add_subgraph_name(subgraph_name.as_str()) + { + Err(e) => { + return Ok((Value::default(), vec![e])); + } + Ok(res) => res.response.into_parts(), + }; + + let (value, errors) = + fetch_node.response_at_path(&schema, ¤t_dir, paths, response); + Ok((value, errors)) + }) + } + + fn fetch_with_subgraph_service( + schema: Arc, + subgraph_service_factory: Arc, + subgraph_schemas: Arc, + request: FetchRequest, + ) -> BoxFuture<'static, Result> { + let FetchRequest { + fetch_node, + supergraph_request, + variables, + current_dir, + context, + } = request; + + let FetchNode { + ref service_name, + ref operation, + ref operation_kind, + ref operation_name, + .. 
+ } = fetch_node; + + let uri = schema + .subgraph_url(service_name.as_ref()) + .unwrap_or_else(|| { + panic!("schema uri for subgraph '{service_name}' should already have been checked") + }) + .clone(); + + let alias_query_string; // this exists outside the if block to allow the as_str() to be longer lived + let aliased_operation = if let Some(ctx_arg) = &variables.contextual_arguments { + if let Some(subgraph_schema) = subgraph_schemas.get(&service_name.to_string()) { + match build_operation_with_aliasing(operation, ctx_arg, &subgraph_schema.schema) { + Ok(op) => { + alias_query_string = op.serialize().no_indent().to_string(); + alias_query_string.as_str() + } + Err(errors) => { + tracing::debug!( + "couldn't generate a valid executable document? {:?}", + errors + ); + operation.as_serialized() + } + } + } else { + tracing::debug!( + "couldn't find a subgraph schema for service {:?}", + &service_name + ); + operation.as_serialized() + } + } else { + operation.as_serialized() + }; + + let aqs = aliased_operation.to_string(); // TODO + let current_dir = current_dir.clone(); + let service = subgraph_service_factory + .create(&service_name.clone()) + .expect("we already checked that the service exists during planning; qed"); + + let mut subgraph_request = SubgraphRequest::builder() + .supergraph_request(supergraph_request.clone()) + .subgraph_request( + http_ext::Request::builder() + .method(http::Method::POST) + .uri(uri) + .body( + GraphQLRequest::builder() + .query(aliased_operation) + .and_operation_name(operation_name.as_ref().map(|n| n.to_string())) + .variables(variables.variables.clone()) + .build(), + ) + .build() + .expect("it won't fail because the url is correct and already checked; qed"), + ) + .subgraph_name(service_name.to_string()) + .operation_kind(*operation_kind) + .context(context.clone()) + .build(); + subgraph_request.query_hash = fetch_node.schema_aware_hash.clone(); + subgraph_request.authorization = fetch_node.authorization.clone(); + 
Box::pin(async move { + Ok(fetch_node + .subgraph_fetch( + service, + subgraph_request, + ¤t_dir, + &schema, + variables.inverted_paths, + &aqs, + variables.variables, + ) + .await) + }) + } + + fn handle_subscription( + &mut self, + request: SubscriptionRequest, + ) -> >::Future { + let SubscriptionRequest { + ref context, + subscription_node: SubscriptionNode { + ref service_name, .. + }, + .. + } = request; + + let service_name = service_name.clone(); + let fetch_time_offset = context.created_at.elapsed().as_nanos() as i64; + + // Subscriptions are not supported for connectors, so they always go to the subgraph service + Self::subscription_with_subgraph_service( + self.schema.clone(), + self.subgraph_service_factory.clone(), + request, + ) + .instrument(tracing::info_span!( + SUBSCRIBE_SPAN_NAME, + "otel.kind" = "INTERNAL", + "apollo.subgraph.name" = service_name.as_ref(), + "apollo_private.sent_time_offset" = fetch_time_offset + )) + } + + fn subscription_with_subgraph_service( + schema: Arc, + subgraph_service_factory: Arc, + request: SubscriptionRequest, + ) -> BoxFuture<'static, Result> { + let SubscriptionRequest { + context, + subscription_node, + current_dir, + sender, + variables, + supergraph_request, + subscription_handle, + subscription_config, + .. + } = request; + let SubscriptionNode { + ref service_name, + ref operation, + ref operation_name, + .. 
+ } = subscription_node; + + let service_name = service_name.clone(); + + if let Some(max_opened_subscriptions) = subscription_config + .as_ref() + .and_then(|s| s.max_opened_subscriptions) + { + if OPENED_SUBSCRIPTIONS.load(Ordering::Relaxed) >= max_opened_subscriptions { + return Box::pin(async { + Ok(( + Value::default(), + vec![Error::builder() + .message("can't open new subscription, limit reached") + .extension_code("SUBSCRIPTION_MAX_LIMIT") + .build()], + )) + }); + } + } + let mode = match subscription_config.as_ref() { + Some(config) => config + .mode + .get_subgraph_config(&service_name) + .map(|mode| (config.clone(), mode)), + None => { + return Box::pin(async { + Ok(( + Value::default(), + vec![Error::builder() + .message("subscription support is not enabled") + .extension_code("SUBSCRIPTION_DISABLED") + .build()], + )) + }); + } + }; + + let service = subgraph_service_factory + .create(&service_name.clone()) + .expect("we already checked that the service exists during planning; qed"); + + let uri = schema + .subgraph_url(service_name.as_ref()) + .unwrap_or_else(|| { + panic!("schema uri for subgraph '{service_name}' should already have been checked") + }) + .clone(); + + let (tx_handle, rx_handle) = mpsc::channel::(1); + + let subscription_handle = subscription_handle + .as_ref() + .expect("checked in PlanNode; qed"); + + let subgraph_request = SubgraphRequest::builder() + .supergraph_request(supergraph_request.clone()) + .subgraph_request( + http_ext::Request::builder() + .method(http::Method::POST) + .uri(uri) + .body( + crate::graphql::Request::builder() + .query(operation.as_serialized()) + .and_operation_name(operation_name.as_ref().map(|n| n.to_string())) + .variables(variables.variables.clone()) + .build(), + ) + .build() + .expect("it won't fail because the url is correct and already checked; qed"), + ) + .operation_kind(OperationKind::Subscription) + .context(context) + .subgraph_name(service_name.to_string()) + .subscription_stream(tx_handle) 
+ .and_connection_closed_signal(Some(subscription_handle.closed_signal.resubscribe())) + .build(); + + let mut subscription_handle = subscription_handle.clone(); + Box::pin(async move { + let response = match mode { + Some((subscription_config, _mode)) => { + let subscription_params = SubscriptionTaskParams { + client_sender: sender, + subscription_handle: subscription_handle.clone(), + subscription_config: subscription_config.clone(), + stream_rx: rx_handle.into(), + }; + + let subscription_conf_tx = + match subscription_handle.subscription_conf_tx.take() { + Some(sc) => sc, + None => { + return Ok(( + Value::default(), + vec![Error::builder() + .message("no subscription conf sender provided for a subscription") + .extension_code("NO_SUBSCRIPTION_CONF_TX") + .build()], + )); + } + }; + + if let Err(err) = subscription_conf_tx.send(subscription_params).await { + return Ok(( + Value::default(), + vec![Error::builder() + .message(format!("cannot send the subscription data: {err:?}")) + .extension_code("SUBSCRIPTION_DATA_SEND_ERROR") + .build()], + )); + } + + match service + .oneshot(subgraph_request) + .instrument(tracing::trace_span!("subscription_call")) + .await + .map_to_graphql_error(service_name.to_string(), ¤t_dir) + { + Err(e) => { + failfast_error!("subgraph call fetch error: {}", e); + vec![e] + } + Ok(response) => response.response.into_parts().1.errors, + } + } + None => { + vec![Error::builder() + .message(format!( + "subscription mode is not configured for subgraph {:?}", + service_name + )) + .extension_code("INVALID_SUBSCRIPTION_MODE") + .build()] + } + }; + Ok((Value::default(), response)) + }) + } +} + +#[derive(Clone)] +pub(crate) struct FetchServiceFactory { + pub(crate) schema: Arc, + pub(crate) subgraph_schemas: Arc, + pub(crate) subgraph_service_factory: Arc, + pub(crate) subscription_config: Option, + pub(crate) connector_service_factory: Arc, +} + +impl FetchServiceFactory { + pub(crate) fn new( + schema: Arc, + subgraph_schemas: Arc, + 
subgraph_service_factory: Arc, + subscription_config: Option, + connector_service_factory: Arc, + ) -> Self { + Self { + subgraph_service_factory, + subgraph_schemas, + schema, + subscription_config, + connector_service_factory, + } + } +} + +impl ServiceFactory for FetchServiceFactory { + type Service = BoxService; + + fn create(&self) -> Self::Service { + FetchService { + subgraph_service_factory: self.subgraph_service_factory.clone(), + schema: self.schema.clone(), + subgraph_schemas: self.subgraph_schemas.clone(), + _subscription_config: self.subscription_config.clone(), + connector_service_factory: self.connector_service_factory.clone(), + } + .boxed() + } +} diff --git a/apollo-router/src/services/hickory_dns_connector.rs b/apollo-router/src/services/hickory_dns_connector.rs index 987c6ec52f..5b7573f672 100644 --- a/apollo-router/src/services/hickory_dns_connector.rs +++ b/apollo-router/src/services/hickory_dns_connector.rs @@ -9,9 +9,9 @@ use std::task::Poll; use hickory_resolver::config::LookupIpStrategy; use hickory_resolver::system_conf::read_system_conf; use hickory_resolver::TokioAsyncResolver; -use hyper::client::connect::dns::Name; -use hyper::client::HttpConnector; -use hyper::service::Service; +use hyper_util::client::legacy::connect::dns::Name; +use hyper_util::client::legacy::connect::HttpConnector; +use tower::Service; use crate::configuration::shared::DnsResolutionStrategy; diff --git a/apollo-router/src/services/http.rs b/apollo-router/src/services/http.rs index 105bb26065..fa2abad3c9 100644 --- a/apollo-router/src/services/http.rs +++ b/apollo-router/src/services/http.rs @@ -9,7 +9,6 @@ use super::router::body::RouterBody; use super::Plugins; use crate::Context; -pub(crate) mod body_stream; pub(crate) mod service; #[cfg(test)] mod tests; diff --git a/apollo-router/src/services/http/body_stream.rs b/apollo-router/src/services/http/body_stream.rs deleted file mode 100644 index e75ed22c50..0000000000 --- 
a/apollo-router/src/services/http/body_stream.rs +++ /dev/null @@ -1,34 +0,0 @@ -use std::task::Poll; - -use futures::Stream; -use pin_project_lite::pin_project; - -pin_project! { - /// Allows conversion between an http_body::Body and a futures stream. - pub(crate) struct BodyStream { - #[pin] - inner: B - } -} - -impl BodyStream { - /// Create a new `BodyStream`. - pub(crate) fn new(body: B) -> Self { - Self { inner: body } - } -} - -impl Stream for BodyStream -where - B: http_body::Body, - B::Error: Into, -{ - type Item = Result; - - fn poll_next( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> Poll> { - self.project().inner.poll_data(cx) - } -} diff --git a/apollo-router/src/services/http/service.rs b/apollo-router/src/services/http/service.rs index b8bfb38f04..cd282f6fd0 100644 --- a/apollo-router/src/services/http/service.rs +++ b/apollo-router/src/services/http/service.rs @@ -1,33 +1,31 @@ +use std::error::Error as _; use std::fmt::Display; use std::sync::Arc; use std::task::Poll; use std::time::Duration; use ::serde::Deserialize; -use bytes::Bytes; use futures::future::BoxFuture; -use futures::Stream; -use futures::TryFutureExt; use global::get_text_map_propagator; use http::header::ACCEPT_ENCODING; use http::header::CONTENT_ENCODING; use http::HeaderValue; use http::Request; -use hyper::client::HttpConnector; +use http_body_util::BodyExt; use hyper_rustls::HttpsConnector; +use hyper_util::client::legacy::connect::HttpConnector; #[cfg(unix)] use hyperlocal::UnixConnector; use opentelemetry::global; -use pin_project_lite::pin_project; use rustls::ClientConfig; use rustls::RootCertStore; use schemars::JsonSchema; +#[cfg(unix)] use tower::util::Either; use tower::BoxError; use tower::Service; use tower::ServiceBuilder; use tower_http::decompression::Decompression; -use tower_http::decompression::DecompressionBody; use tower_http::decompression::DecompressionLayer; use tracing::Instrument; @@ -40,19 +38,22 @@ use 
crate::plugins::authentication::subgraph::SigningParamsConfig; use crate::plugins::telemetry::consts::HTTP_REQUEST_SPAN_NAME; use crate::plugins::telemetry::otel::OpenTelemetrySpanExt; use crate::plugins::telemetry::reload::prepare_context; -use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; -use crate::plugins::telemetry::LOGGING_DISPLAY_HEADERS; use crate::plugins::traffic_shaping::Http2Config; use crate::services::hickory_dns_connector::new_async_http_connector; use crate::services::hickory_dns_connector::AsyncHyperResolver; +use crate::services::router; use crate::services::router::body::RouterBody; use crate::Configuration; use crate::Context; -type HTTPClient = - Decompression>, RouterBody>>; +type HTTPClient = Decompression< + hyper_util::client::legacy::Client< + HttpsConnector>, + RouterBody, + >, +>; #[cfg(unix)] -type UnixHTTPClient = Decompression>; +type UnixHTTPClient = Decompression>; #[cfg(unix)] type MixedClient = Either; #[cfg(not(unix))] @@ -89,7 +90,7 @@ impl Display for Compression { #[derive(Clone)] pub(crate) struct HttpClientService { - // Note: We use hyper::Client here in preference to reqwest to avoid expensive URL translation + // Note: We use hyper_util::client::legacy::Client here in preference to reqwest to avoid expensive URL translation // in the hot path. We use reqwest elsewhere because it's convenient and some of the // opentelemetry crate require reqwest clients to work correctly (at time of writing). 
http_client: HTTPClient, @@ -129,7 +130,8 @@ impl HttpClientService { .client_authentication .as_ref()); - let tls_client_config = generate_tls_client_config(tls_cert_store, client_cert_config)?; + let tls_client_config = + generate_tls_client_config(tls_cert_store, client_cert_config.map(|arc| arc.as_ref()))?; HttpClientService::new(name, tls_client_config, client_config) } @@ -157,10 +159,11 @@ impl HttpClientService { builder.wrap_connector(http_connector) }; - let http_client = hyper::Client::builder() - .pool_idle_timeout(POOL_IDLE_TIMEOUT_DURATION) - .http2_only(http2 == Http2Config::Http2Only) - .build(connector); + let http_client = + hyper_util::client::legacy::Client::builder(hyper_util::rt::TokioExecutor::new()) + .pool_idle_timeout(POOL_IDLE_TIMEOUT_DURATION) + .http2_only(http2 == Http2Config::Http2Only) + .build(connector); Ok(Self { http_client: ServiceBuilder::new() .layer(DecompressionLayer::new()) @@ -168,33 +171,23 @@ impl HttpClientService { #[cfg(unix)] unix_client: ServiceBuilder::new() .layer(DecompressionLayer::new()) - .service(hyper::Client::builder().build(UnixConnector)), + .service( + hyper_util::client::legacy::Client::builder( + hyper_util::rt::TokioExecutor::new(), + ) + .build(UnixConnector), + ), service: Arc::new(service.into()), }) } pub(crate) fn native_roots_store() -> RootCertStore { let mut roots = rustls::RootCertStore::empty(); - let mut valid_count = 0; - let mut invalid_count = 0; - - for cert in rustls_native_certs::load_native_certs().expect("could not load platform certs") - { - let cert = rustls::Certificate(cert.0); - match roots.add(&cert) { - Ok(_) => valid_count += 1, - Err(err) => { - tracing::trace!("invalid cert der {:?}", cert.0); - tracing::debug!("certificate parsing failed: {:?}", err); - invalid_count += 1 - } - } - } - tracing::debug!( - "with_native_roots processed {} valid and {} invalid certs", - valid_count, - invalid_count + + roots.add_parsable_certificates( + 
rustls_native_certs::load_native_certs().expect("could not load platform certs"), ); + assert!(!roots.is_empty(), "no CA certificates found"); roots } @@ -204,13 +197,17 @@ pub(crate) fn generate_tls_client_config( tls_cert_store: RootCertStore, client_cert_config: Option<&TlsClientAuth>, ) -> Result { - let tls_builder = rustls::ClientConfig::builder().with_safe_defaults(); + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let tls_builder = rustls::ClientConfig::builder(); + Ok(match client_cert_config { Some(client_auth_config) => tls_builder .with_root_certificates(tls_cert_store) .with_client_auth_cert( client_auth_config.certificate_chain.clone(), - client_auth_config.key.clone(), + client_auth_config.key.clone_key(), )?, None => tls_builder .with_root_certificates(tls_cert_store) @@ -251,8 +248,8 @@ impl tower::Service for HttpClientService { #[cfg(unix)] let client = match schema_uri.scheme().map(|s| s.as_str()) { - Some("unix") => Either::B(self.unix_client.clone()), - _ => Either::A(self.http_client.clone()), + Some("unix") => Either::Right(self.unix_client.clone()), + _ => Either::Left(self.http_client.clone()), }; #[cfg(not(unix))] let client = self.http_client.clone(); @@ -274,7 +271,7 @@ impl tower::Service for HttpClientService { get_text_map_propagator(|propagator| { propagator.inject_context( &prepare_context(http_req_span.context()), - &mut opentelemetry_http::HeaderInjector(http_request.headers_mut()), + &mut crate::otel_compat::HeaderInjector(http_request.headers_mut()), ); }); @@ -288,8 +285,9 @@ impl tower::Service for HttpClientService { let body = match opt_compressor { None => body, - Some(compressor) => RouterBody::wrap_stream(compressor.process(body)), + Some(compressor) => router::body::from_result_stream(compressor.process(body)), }; + let mut http_request = http::Request::from_parts(parts, body); http_request @@ -307,26 +305,10 @@ impl tower::Service for HttpClientService { http_request }; 
- let display_headers = context.contains_key(LOGGING_DISPLAY_HEADERS); - let display_body = context.contains_key(LOGGING_DISPLAY_BODY); - - // Print out the debug for the request - if display_headers { - tracing::info!(http.request.headers = ?http_request.headers(), apollo.subgraph.name = %service_name, "Request headers to subgraph {service_name:?}"); - } - if display_body { - tracing::info!(http.request.body = ?http_request.body(), apollo.subgraph.name = %service_name, "Request body to subgraph {service_name:?}"); - } - let http_response = do_fetch(client, &context, &service_name, http_request) .instrument(http_req_span) .await?; - // Print out the debug for the response - if display_headers { - tracing::info!(response.headers = ?http_response.headers(), apollo.subgraph.name = %service_name, "Response headers from subgraph {service_name:?}"); - } - Ok(HttpResponse { http_response, context, @@ -335,6 +317,34 @@ impl tower::Service for HttpClientService { } } +/// Hyper client errors are very opaque. This function peels back the layers and attempts to +/// provide a useful message to end users. +fn report_hyper_client_error(err: hyper_util::client::legacy::Error) -> String { + // At the time of writing, a hyper-util error only prints "client error", and no useful further + // information. So if we have a source error (always true in practice), we simply discard the + // "client error" part and only report the inner error. + let Some(source) = err.source() else { + // No further information + return err.to_string(); + }; + + // If there was a connection, parsing, http, etc, error, the source will be a + // `hyper::Error`. `hyper::Error` provides a minimal error message only, that + // will explain vaguely where the problem is, like "error in user's Body stream", + // or "error parsing http header". + // This is important to preserve as it may clarify the difference between a malfunctioning + // subgraph and a buggy router. 
+ // It's not enough information though, in particular for the user error kinds, so if there is + // another inner error, we report *both* the hyper error and the inner error. + let subsource = source + .downcast_ref::() + .and_then(|err| err.source()); + match subsource { + Some(inner_err) => format!("{source}: {inner_err}"), + None => source.to_string(), + } +} + async fn do_fetch( mut client: MixedClient, context: &Context, @@ -344,49 +354,18 @@ async fn do_fetch( let _active_request_guard = context.enter_active_request(); let (parts, body) = client .call(request) + .await .map_err(|err| { tracing::error!(fetch_error = ?err); FetchError::SubrequestHttpError { status_code: None, service: service_name.to_string(), - reason: err.to_string(), + reason: report_hyper_client_error(err), } - }) - .await? + })? .into_parts(); Ok(http::Response::from_parts( parts, - RouterBody::wrap_stream(BodyStream { inner: body }), + RouterBody::new(body.map_err(axum::Error::new)), )) } - -pin_project! { - pub(crate) struct BodyStream { - #[pin] - inner: DecompressionBody - } -} - -impl BodyStream { - /// Create a new `BodyStream`. 
- pub(crate) fn new(body: DecompressionBody) -> Self { - Self { inner: body } - } -} - -impl Stream for BodyStream -where - B: hyper::body::HttpBody, - B::Error: Into, -{ - type Item = Result; - - fn poll_next( - self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> Poll> { - use hyper::body::HttpBody; - - self.project().inner.poll_data(cx) - } -} diff --git a/apollo-router/src/services/http/tests.rs b/apollo-router/src/services/http/tests.rs index 68bf996939..7eca2bf8d8 100644 --- a/apollo-router/src/services/http/tests.rs +++ b/apollo-router/src/services/http/tests.rs @@ -1,6 +1,5 @@ use std::convert::Infallible; use std::io; -use std::net::TcpListener; use std::str::FromStr; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; @@ -8,29 +7,30 @@ use std::sync::Arc; use async_compression::tokio::write::GzipDecoder; use async_compression::tokio::write::GzipEncoder; -use axum::Server; +use axum::body::Body; use http::header::CONTENT_ENCODING; use http::header::CONTENT_TYPE; +use http::Request; use http::StatusCode; use http::Uri; use http::Version; -use hyper::server::conn::AddrIncoming; -use hyper::service::make_service_fn; -use hyper::Body; +use hyper::body::Incoming; use hyper_rustls::ConfigBuilderExt; -use hyper_rustls::TlsAcceptor; -#[cfg(unix)] -use hyperlocal::UnixServerExt; +use hyper_util::rt::TokioExecutor; +use hyper_util::rt::TokioIo; use mime::APPLICATION_JSON; -use rustls::server::AllowAnyAuthenticatedClient; -use rustls::Certificate; -use rustls::PrivateKey; +use rustls::pki_types::CertificateDer; +use rustls::pki_types::PrivateKeyDer; +use rustls::server::WebPkiClientVerifier; use rustls::RootCertStore; use rustls::ServerConfig; use serde_json_bytes::ByteString; use serde_json_bytes::Value; use tokio::io::AsyncWriteExt; -use tower::service_fn; +use tokio::net::TcpListener; +#[cfg(unix)] +use tokio::net::UnixListener; +use tokio_rustls::TlsAcceptor; use tower::BoxError; use tower::ServiceExt; @@ -44,37 +44,105 @@ use 
crate::plugin::PluginPrivate; use crate::plugins::traffic_shaping::Http2Config; use crate::services::http::HttpClientService; use crate::services::http::HttpRequest; -use crate::services::router::body::get_body_bytes; +use crate::services::router; use crate::services::supergraph; use crate::Configuration; use crate::Context; use crate::TestHarness; async fn tls_server( - listener: tokio::net::TcpListener, - certificates: Vec, - key: PrivateKey, + listener: TcpListener, + certificates: Vec>, + key: PrivateKeyDer<'static>, body: &'static str, ) { - let acceptor = TlsAcceptor::builder() - .with_single_cert(certificates, key) - .unwrap() - .with_all_versions_alpn() - .with_incoming(AddrIncoming::from_listener(listener).unwrap()); - let service = make_service_fn(|_| async { - Ok::<_, io::Error>(service_fn(|_req| async { - Ok::<_, io::Error>( - http::Response::builder() - .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .status(StatusCode::OK) - .version(Version::HTTP_11) - .body::(body.into()) - .unwrap(), - ) - })) - }); - let server = Server::builder(acceptor).serve(service); - server.await.unwrap() + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let tls_config = Arc::new( + ServerConfig::builder() + .with_no_client_auth() + .with_single_cert(certificates, key) + .expect("built our tls config"), + ); + let acceptor = TlsAcceptor::from(tls_config); + + loop { + let (stream, _) = listener.accept().await.expect("accepting connections"); + let acceptor = acceptor.clone(); + + tokio::spawn(async move { + let acceptor_stream = acceptor.accept(stream).await.expect("accepted stream"); + let tokio_stream = TokioIo::new(acceptor_stream); + + let hyper_service = + hyper::service::service_fn(move |_request: Request| async { + Ok::<_, io::Error>( + http::Response::builder() + .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) + .status(StatusCode::OK) + .version(Version::HTTP_11) + .body::(body.into()) + .unwrap(), + ) + 
}); + if let Err(err) = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()) + .serve_connection_with_upgrades(tokio_stream, hyper_service) + .await + { + eprintln!("failed to serve connection: {err:#}"); + } + }); + } +} + +async fn serve(listener: TcpListener, handle: Handler) -> std::io::Result<()> +where + Handler: (Fn(http::Request) -> Fut) + Clone + Sync + Send + 'static, + Fut: std::future::Future, Infallible>> + Send + 'static, +{ + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + let handle = handle.clone(); + tokio::spawn(async move { + // N.B. should use hyper service_fn here, since it's required to be implemented hyper Service trait! + let svc = hyper::service::service_fn(|request: Request| { + handle(request.map(Body::new)) + }); + if let Err(err) = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()) + .serve_connection_with_upgrades(io, svc) + .await + { + eprintln!("server error: {}", err); + } + }); + } +} + +#[cfg(unix)] +async fn serve_unix(listener: UnixListener, handle: Handler) -> std::io::Result<()> +where + Handler: (Fn(http::Request) -> Fut) + Clone + Sync + Send + 'static, + Fut: std::future::Future, Infallible>> + Send + 'static, +{ + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + let handle = handle.clone(); + tokio::spawn(async move { + // N.B. should use hyper service_fn here, since it's required to be implemented hyper Service trait! 
+ let svc = hyper::service::service_fn(|request: Request| { + handle(request.map(Body::new)) + }); + if let Err(err) = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()) + .serve_connection_with_upgrades(io, svc) + .await + { + eprintln!("server error: {}", err); + } + }); + } } // Note: This test relies on a checked in certificate with the following validity @@ -97,7 +165,7 @@ async fn tls_self_signed() { let certificates = load_certs(certificate_pem).unwrap(); let key = load_key(key_pem).unwrap(); - let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(tls_server(listener, certificates, key, r#"{"data": null}"#)); @@ -126,7 +194,9 @@ async fn tls_self_signed() { http_request: http::Request::builder() .uri(url) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .body(r#"{"query":"{ me { name username } }"#.into()) + .body(router::body::from_bytes( + r#"{"query":"{ me { name username } }"#, + )) .unwrap(), context: Context::new(), }) @@ -135,7 +205,7 @@ async fn tls_self_signed() { assert_eq!( std::str::from_utf8( - &get_body_bytes(response.http_response.into_parts().1) + &router::body::into_bytes(response.http_response.into_parts().1) .await .unwrap() ) @@ -154,7 +224,7 @@ async fn tls_custom_root() { certificates.extend(load_certs(ca_pem).unwrap()); let key = load_key(key_pem).unwrap(); - let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(tls_server(listener, certificates, key, r#"{"data": null}"#)); @@ -183,7 +253,9 @@ async fn tls_custom_root() { http_request: http::Request::builder() .uri(url) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .body(r#"{"query":"{ me { name username } }"#.into()) + 
.body(router::body::from_bytes( + r#"{"query":"{ me { name username } }"#, + )) .unwrap(), context: Context::new(), }) @@ -191,7 +263,7 @@ async fn tls_custom_root() { .unwrap(); assert_eq!( std::str::from_utf8( - &get_body_bytes(response.http_response.into_parts().1) + &router::body::into_bytes(response.http_response.into_parts().1) .await .unwrap() ) @@ -201,45 +273,60 @@ async fn tls_custom_root() { } async fn tls_server_with_client_auth( - listener: tokio::net::TcpListener, - certificates: Vec, - key: PrivateKey, - client_root: Certificate, + listener: TcpListener, + certificates: Vec>, + key: PrivateKeyDer<'static>, + client_root: CertificateDer<'static>, body: &'static str, ) { let mut client_auth_roots = RootCertStore::empty(); - client_auth_roots.add(&client_root).unwrap(); + client_auth_roots.add(client_root).unwrap(); - let client_auth = AllowAnyAuthenticatedClient::new(client_auth_roots).boxed(); + let client_auth = WebPkiClientVerifier::builder(Arc::new(client_auth_roots)) + .build() + .unwrap(); - let acceptor = TlsAcceptor::builder() - .with_tls_config( - ServerConfig::builder() - .with_safe_defaults() - .with_client_cert_verifier(client_auth) - .with_single_cert(certificates, key) - .unwrap(), - ) - .with_all_versions_alpn() - .with_incoming(AddrIncoming::from_listener(listener).unwrap()); - let service = make_service_fn(|_| async { - Ok::<_, io::Error>(service_fn(|_req| async { - Ok::<_, io::Error>( - http::Response::builder() - .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .status(StatusCode::OK) - .version(Version::HTTP_11) - .body::(body.into()) - .unwrap(), - ) - })) - }); - let server = Server::builder(acceptor).serve(service); - server.await.unwrap() + let tls_config = Arc::new( + ServerConfig::builder() + .with_client_cert_verifier(client_auth) + .with_single_cert(certificates, key) + .unwrap(), + ); + let acceptor = TlsAcceptor::from(tls_config); + + loop { + let (stream, _) = listener.accept().await.expect("accepting 
connections"); + let acceptor = acceptor.clone(); + + tokio::spawn(async move { + let acceptor_stream = acceptor.accept(stream).await.expect("accepted stream"); + let tokio_stream = TokioIo::new(acceptor_stream); + + let hyper_service = + hyper::service::service_fn(move |_request: Request| async { + Ok::<_, io::Error>( + http::Response::builder() + .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) + .status(StatusCode::OK) + .version(Version::HTTP_11) + .body::(body.into()) + .unwrap(), + ) + }); + if let Err(err) = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()) + .serve_connection_with_upgrades(tokio_stream, hyper_service) + .await + { + eprintln!("failed to serve connection: {err:#}"); + } + }); + } } #[tokio::test(flavor = "multi_thread")] async fn tls_client_auth() { + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); let server_certificate_pem = include_str!("./testdata/server.crt"); let ca_pem = include_str!("./testdata/CA/ca.crt"); let server_key_pem = include_str!("./testdata/server.key"); @@ -249,7 +336,7 @@ async fn tls_client_auth() { server_certificates.push(ca_certificate.clone()); let key = load_key(server_key_pem).unwrap(); - let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(tls_server_with_client_auth( listener, @@ -273,10 +360,10 @@ async fn tls_client_auth() { "test".to_string(), TlsClient { certificate_authorities: Some(ca_pem.into()), - client_authentication: Some(TlsClientAuth { + client_authentication: Some(Arc::new(TlsClientAuth { certificate_chain: client_certificates, key: client_key, - }), + })), }, ); let subgraph_service = HttpClientService::from_config( @@ -293,7 +380,9 @@ async fn tls_client_auth() { http_request: http::Request::builder() .uri(url) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - 
.body(r#"{"query":"{ me { name username } }"#.into()) + .body(router::body::from_bytes( + r#"{"query":"{ me { name username } }"#, + )) .unwrap(), context: Context::new(), }) @@ -301,7 +390,7 @@ async fn tls_client_auth() { .unwrap(); assert_eq!( std::str::from_utf8( - &get_body_bytes(response.http_response.into_parts().1) + &router::body::into_bytes(response.http_response.into_parts().1) .await .unwrap() ) @@ -328,24 +417,23 @@ async fn emulate_h2c_server(listener: TcpListener) { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener) - .unwrap() - .http2_only(true) - .serve(make_svc); - server.await.unwrap(); + // XXX(@goto-bus-stop): ideally this server would *only* support HTTP 2 and not HTTP 1 + serve(listener, handle).await.unwrap(); } #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_h2c() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_h2c_server(listener)); let subgraph_service = HttpClientService::new( "test", rustls::ClientConfig::builder() - .with_safe_defaults() .with_native_roots() + .expect("read native TLS root certificates") .with_no_client_auth(), crate::configuration::shared::Client::builder() .experimental_http2(Http2Config::Http2Only) @@ -359,7 +447,9 @@ async fn test_subgraph_h2c() { http_request: http::Request::builder() .uri(url) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .body(r#"{"query":"{ me { name username } }"#.into()) + .body(router::body::from_bytes( + r#"{"query":"{ me { name username } }"#, + )) .unwrap(), context: Context::new(), }) @@ -367,7 +457,7 @@ async fn test_subgraph_h2c() { .unwrap(); assert_eq!( std::str::from_utf8( - 
&get_body_bytes(response.http_response.into_parts().1) + &router::body::into_bytes(response.http_response.into_parts().1) .await .unwrap() ) @@ -379,7 +469,10 @@ async fn test_subgraph_h2c() { // starts a local server emulating a subgraph returning compressed response async fn emulate_subgraph_compressed_response(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { - let body = get_body_bytes(request.into_body()).await.unwrap().to_vec(); + let body = router::body::into_bytes(request.into_body()) + .await + .unwrap() + .to_vec(); let mut decoder = GzipDecoder::new(Vec::new()); decoder.write_all(&body).await.unwrap(); decoder.shutdown().await.unwrap(); @@ -409,21 +502,22 @@ async fn emulate_subgraph_compressed_response(listener: TcpListener) { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } #[tokio::test(flavor = "multi_thread")] async fn test_compressed_request_response_body() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + // Though the server doesn't use TLS, the client still supports it, and so we need crypto stuff + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_compressed_response(listener)); let subgraph_service = HttpClientService::new( "test", rustls::ClientConfig::builder() - .with_safe_defaults() .with_native_roots() + .expect("read native TLS root certificates") .with_no_client_auth(), crate::configuration::shared::Client::builder() .experimental_http2(Http2Config::Http2Only) @@ -438,7 +532,9 @@ async fn test_compressed_request_response_body() { .uri(url) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) 
.header(CONTENT_ENCODING, "gzip") - .body(r#"{"query":"{ me { name username } }"#.into()) + .body(router::body::from_bytes( + r#"{"query":"{ me { name username } }"#, + )) .unwrap(), context: Context::new(), }) @@ -447,7 +543,7 @@ async fn test_compressed_request_response_body() { assert_eq!( std::str::from_utf8( - &get_body_bytes(response.http_response.into_parts().1) + &router::body::into_bytes(response.http_response.into_parts().1) .await .unwrap() ) @@ -569,29 +665,22 @@ async fn test_unix_socket() { let path = dir.path().join("router.sock"); let schema = make_schema(path.to_str().unwrap()); - let make_service = make_service_fn(|_| async { - Ok::<_, hyper::Error>(service_fn(|mut req: http::Request| async move { - let data = get_body_bytes(req.body_mut()).await.unwrap(); - let body = std::str::from_utf8(&data).unwrap(); - println!("{:?}", body); - let response = http::Response::builder() - .status(StatusCode::OK) - .header(CONTENT_TYPE, "application/json") - .body(Body::from( - r#"{ "data": { "currentUser": { "id": "0" } } }"#, - )) - .unwrap(); - Ok::<_, hyper::Error>(response) - })) - }); - - tokio::task::spawn(async move { - hyper::Server::bind_unix(path) - .unwrap() - .serve(make_service) - .await + async fn handle(mut req: http::Request) -> Result, Infallible> { + let data = router::body::into_bytes(req.body_mut()).await.unwrap(); + let body = std::str::from_utf8(&data).unwrap(); + println!("{:?}", body); + let response = http::Response::builder() + .status(StatusCode::OK) + .header(CONTENT_TYPE, "application/json") + .body(Body::from( + r#"{ "data": { "currentUser": { "id": "0" } } }"#, + )) .unwrap(); - }); + Ok(response) + } + + let listener = UnixListener::bind(path).unwrap(); + tokio::task::spawn(serve_unix(listener, handle)); let service = TestHarness::builder() .configuration_json(serde_json::json!({"include_subgraph_errors": { "all": true } })) diff --git a/apollo-router/src/services/layers/content_negotiation.rs 
b/apollo-router/src/services/layers/content_negotiation.rs index 64f9a18367..5d7a83071d 100644 --- a/apollo-router/src/services/layers/content_negotiation.rs +++ b/apollo-router/src/services/layers/content_negotiation.rs @@ -55,7 +55,7 @@ where let response: http::Response = http::Response::builder() .status(StatusCode::UNSUPPORTED_MEDIA_TYPE) .header(CONTENT_TYPE, APPLICATION_JSON.essence_str()) - .body(crate::services::router::Body::from( + .body(router::body::from_bytes( serde_json::json!({ "errors": [ graphql::Error::builder() @@ -71,17 +71,6 @@ where .to_string(), )) .expect("cannot fail"); - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - status = StatusCode::UNSUPPORTED_MEDIA_TYPE.as_u16() as i64, - error = format!( - r#"'content-type' header must be one of: {:?} or {:?}"#, - APPLICATION_JSON.essence_str(), - GRAPHQL_JSON_RESPONSE_HEADER_VALUE, - ) - ); return Ok(ControlFlow::Break(response.into())); } @@ -99,8 +88,8 @@ where Ok(ControlFlow::Continue(req)) } else { - let response: http::Response = http::Response::builder().status(StatusCode::NOT_ACCEPTABLE).header(CONTENT_TYPE, APPLICATION_JSON.essence_str()).body( - hyper::Body::from( + let response: http::Response = http::Response::builder().status(StatusCode::NOT_ACCEPTABLE).header(CONTENT_TYPE, APPLICATION_JSON.essence_str()).body( + router::body::from_bytes( serde_json::json!({ "errors": [ graphql::Error::builder() diff --git a/apollo-router/src/services/layers/query_analysis.rs b/apollo-router/src/services/layers/query_analysis.rs index 7af0a11ec0..c5e3c7b54c 100644 --- a/apollo-router/src/services/layers/query_analysis.rs +++ b/apollo-router/src/services/layers/query_analysis.rs @@ -118,13 +118,6 @@ impl QueryAnalysisLayer { .message("Must provide query string.".to_string()) .extension_code("MISSING_QUERY_STRING") .build()]; - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - status = 
StatusCode::BAD_REQUEST.as_u16() as i64, - error = "Must provide query string" - ); return Err(SupergraphResponse::builder() .errors(errors) diff --git a/apollo-router/src/services/layers/static_page.rs b/apollo-router/src/services/layers/static_page.rs index 75ecfb2538..ec62dc7352 100644 --- a/apollo-router/src/services/layers/static_page.rs +++ b/apollo-router/src/services/layers/static_page.rs @@ -64,7 +64,7 @@ where CONTENT_TYPE, HeaderValue::from_static(mime::TEXT_HTML_UTF_8.as_ref()), ) - .body(crate::services::router::Body::from(page.clone())) + .body(router::body::from_bytes(page.clone())) .unwrap(); ControlFlow::Break(router::Response { response, diff --git a/apollo-router/src/services/mod.rs b/apollo-router/src/services/mod.rs index c244cac420..68102d98af 100644 --- a/apollo-router/src/services/mod.rs +++ b/apollo-router/src/services/mod.rs @@ -11,8 +11,12 @@ use crate::http_ext; pub use crate::http_ext::TryIntoHeaderName; pub use crate::http_ext::TryIntoHeaderValue; pub use crate::query_planner::OperationKind; +pub(crate) use crate::services::connect::Request as ConnectRequest; +pub(crate) use crate::services::connect::Response as ConnectResponse; pub(crate) use crate::services::execution::Request as ExecutionRequest; pub(crate) use crate::services::execution::Response as ExecutionResponse; +pub(crate) use crate::services::fetch::FetchRequest; +pub(crate) use crate::services::fetch::Response as FetchResponse; pub(crate) use crate::services::query_planner::Request as QueryPlannerRequest; pub(crate) use crate::services::query_planner::Response as QueryPlannerResponse; pub(crate) use crate::services::router::Request as RouterRequest; @@ -23,8 +27,12 @@ pub(crate) use crate::services::supergraph::service::SupergraphCreator; pub(crate) use crate::services::supergraph::Request as SupergraphRequest; pub(crate) use crate::services::supergraph::Response as SupergraphResponse; +pub(crate) mod connect; +pub(crate) mod connector_service; pub mod execution; 
pub(crate) mod external; +pub(crate) mod fetch; +pub(crate) mod fetch_service; pub(crate) mod hickory_dns_connector; pub(crate) mod http; pub(crate) mod layers; @@ -34,7 +42,6 @@ pub mod router; pub mod subgraph; pub(crate) mod subgraph_service; pub mod supergraph; -pub mod transport; impl AsRef for http_ext::Request { fn as_ref(&self) -> &Request { diff --git a/apollo-router/src/services/query_planner.rs b/apollo-router/src/services/query_planner.rs index ce4675f884..370ebb3cbc 100644 --- a/apollo-router/src/services/query_planner.rs +++ b/apollo-router/src/services/query_planner.rs @@ -122,10 +122,6 @@ pub(crate) type BoxCloneService = tower::util::BoxCloneService; #[allow(dead_code)] pub(crate) type ServiceResult = Result; -#[allow(dead_code)] -pub(crate) type Body = hyper::Body; -#[allow(dead_code)] -pub(crate) type Error = hyper::Error; #[async_trait] pub(crate) trait QueryPlannerPlugin: Send + Sync + 'static { diff --git a/apollo-router/src/services/router.rs b/apollo-router/src/services/router.rs index cfd6e69c21..519f2cb9ee 100644 --- a/apollo-router/src/services/router.rs +++ b/apollo-router/src/services/router.rs @@ -12,6 +12,7 @@ use http::header::CONTENT_TYPE; use http::HeaderValue; use http::Method; use http::StatusCode; +use http_body_util::BodyExt; use multer::Multipart; use multimap::MultiMap; use serde_json_bytes::ByteString; @@ -27,7 +28,6 @@ use super::supergraph; use crate::graphql; use crate::http_ext::header_map; use crate::json_ext::Path; -use crate::services; use crate::services::TryIntoHeaderName; use crate::services::TryIntoHeaderValue; use crate::Context; @@ -35,8 +35,8 @@ use crate::Context; pub type BoxService = tower::util::BoxService; pub type BoxCloneService = tower::util::BoxCloneService; pub type ServiceResult = Result; -//#[deprecated] -pub type Body = hyper::Body; + +pub type Body = RouterBody; pub type Error = hyper::Error; pub mod body; @@ -66,6 +66,33 @@ impl From<(http::Request, Context)> for Request { } } +/// Helper type to 
conveniently construct a body from several types used commonly in tests. +/// +/// It's only meant for integration tests, as the "real" router should create bodies explicitly accounting for +/// streaming, size limits, etc. +pub struct IntoBody(Body); + +impl From for IntoBody { + fn from(value: Body) -> Self { + Self(value) + } +} +impl From for IntoBody { + fn from(value: String) -> Self { + Self(self::body::from_bytes(value)) + } +} +impl From for IntoBody { + fn from(value: Bytes) -> Self { + Self(self::body::from_bytes(value)) + } +} +impl From> for IntoBody { + fn from(value: Vec) -> Self { + Self(self::body::from_bytes(value)) + } +} + #[buildstructor::buildstructor] impl Request { /// This is the constructor (or builder) to use when constructing a real Request. @@ -101,12 +128,12 @@ impl Request { headers: MultiMap, uri: Option, method: Option, - body: Option, + body: Option, ) -> Result { let mut router_request = http::Request::builder() .uri(uri.unwrap_or_else(|| http::Uri::from_static("http://example.com/"))) .method(method.unwrap_or(Method::GET)) - .body(body.unwrap_or_else(Body::empty))?; + .body(body.map_or_else(self::body::empty, |constructed| constructed.0))?; *router_request.headers_mut() = header_map(headers)?; Ok(Self { router_request, @@ -157,14 +184,13 @@ impl TryFrom for Request { .parse() .map_err(ParseError::InvalidUri)?; - http::Request::from_parts(parts, RouterBody::empty().into_inner()) + http::Request::from_parts(parts, self::body::empty()) } else { http::Request::from_parts( parts, - RouterBody::from( + self::body::from_bytes( serde_json::to_vec(&request).map_err(ParseError::SerializationError)?, - ) - .into_inner(), + ), ) }; Ok(Self { @@ -184,8 +210,8 @@ pub struct Response { #[buildstructor::buildstructor] impl Response { - pub async fn next_response(&mut self) -> Option> { - self.response.body_mut().next().await + pub async fn next_response(&mut self) -> Option> { + self.response.body_mut().into_data_stream().next().await } 
#[deprecated] @@ -236,9 +262,7 @@ impl Response { } } - // let response = builder.body(once(ready(res)).boxed())?; - - let response = builder.body(RouterBody::from(serde_json::to_vec(&res)?).into_inner())?; + let response = builder.body(self::body::from_bytes(serde_json::to_vec(&res)?))?; Ok(Self { response, context }) } @@ -301,7 +325,9 @@ impl Response { } let response = builder - .body(RouterBody::from(serde_json::to_vec(&res).expect("can't fail")).into_inner()) + .body(self::body::from_bytes( + serde_json::to_vec(&res).expect("can't fail"), + )) .expect("can't fail"); Self { response, context } @@ -322,7 +348,10 @@ impl Response { || *value == MULTIPART_SUBSCRIPTION_CONTENT_TYPE_HEADER_VALUE }) { - let multipart = Multipart::new(self.response.into_body(), "graphql"); + let multipart = Multipart::new( + http_body_util::BodyDataStream::new(self.response.into_body()), + "graphql", + ); Either::Left(futures::stream::unfold(multipart, |mut m| async { if let Ok(Some(response)) = m.next_field().await { @@ -336,7 +365,7 @@ impl Response { None })) } else { - let mut body = self.response.into_body(); + let mut body = http_body_util::BodyDataStream::new(self.response.into_body()); let res = body.next().await.and_then(|res| res.ok()); Either::Right( @@ -447,9 +476,7 @@ where let val_any = &mut b as &mut dyn Any; match val_any.downcast_mut::() { Some(body) => mem::take(body), - None => Body::wrap_stream(services::http::body_stream::BodyStream::new( - b.map_err(Into::into), - )), + None => Body::new(http_body_util::BodyStream::new(b.map_err(axum::Error::new))), } } @@ -459,11 +486,11 @@ mod test { use std::task::Context; use std::task::Poll; - use http::HeaderMap; + use http_body::Frame; use tower::BoxError; - use crate::services::router::body::get_body_bytes; - use crate::services::router::convert_to_body; + use super::convert_to_body; + use crate::services::router; struct MockBody { data: Option<&'static str>, @@ -472,39 +499,32 @@ mod test { type Data = bytes::Bytes; 
type Error = BoxError; - fn poll_data( - mut self: Pin<&mut Self>, + fn poll_frame( + self: Pin<&mut Self>, _cx: &mut Context<'_>, - ) -> Poll>> { - if let Some(data) = self.data.take() { - Poll::Ready(Some(Ok(bytes::Bytes::from(data)))) + ) -> Poll, Self::Error>>> { + if let Some(data) = self.get_mut().data.take() { + Poll::Ready(Some(Ok(Frame::data(bytes::Bytes::from(data))))) } else { Poll::Ready(None) } } - - fn poll_trailers( - self: Pin<&mut Self>, - _cx: &mut Context<'_>, - ) -> Poll, Self::Error>> { - Poll::Ready(Ok(None)) - } } #[tokio::test] async fn test_convert_from_http_body() { let body = convert_to_body(MockBody { data: Some("test") }); assert_eq!( - &String::from_utf8(get_body_bytes(body).await.unwrap().to_vec()).unwrap(), + &String::from_utf8(router::body::into_bytes(body).await.unwrap().to_vec()).unwrap(), "test" ); } #[tokio::test] async fn test_convert_from_hyper_body() { - let body = convert_to_body(hyper::Body::from("test")); + let body = convert_to_body(String::from("test")); assert_eq!( - &String::from_utf8(get_body_bytes(body).await.unwrap().to_vec()).unwrap(), + &String::from_utf8(router::body::into_bytes(body).await.unwrap().to_vec()).unwrap(), "test" ); } diff --git a/apollo-router/src/services/router/body.rs b/apollo-router/src/services/router/body.rs index 4890544817..3269a49b16 100644 --- a/apollo-router/src/services/router/body.rs +++ b/apollo-router/src/services/router/body.rs @@ -1,133 +1,63 @@ -#![allow(deprecated)] -use std::fmt::Debug; - +use axum::Error as AxumError; use bytes::Bytes; -use futures::future::BoxFuture; -use futures::FutureExt; use futures::Stream; -use http_body::SizeHint; -use hyper::body::HttpBody; -use tower::BoxError; -use tower::Service; - -pub struct RouterBody(super::Body); - -impl RouterBody { - pub fn empty() -> Self { - Self(super::Body::empty()) - } +use futures::StreamExt; +use http_body::Frame; +use http_body_util::combinators::UnsyncBoxBody; +use http_body_util::BodyExt; +use http_body_util::Empty; 
+use http_body_util::Full; +use http_body_util::StreamBody; +use hyper::body::Body as HttpBody; - pub fn into_inner(self) -> super::Body { - self.0 - } +pub type RouterBody = UnsyncBoxBody; - pub async fn to_bytes(self) -> Result { - hyper::body::to_bytes(self.0).await - } - - pub fn wrap_stream(stream: S) -> RouterBody - where - S: Stream> + Send + 'static, - O: Into + 'static, - E: Into> + 'static, - { - Self(super::Body::wrap_stream(stream)) - } +pub(crate) async fn into_bytes(body: B) -> Result { + Ok(body.collect().await?.to_bytes()) } -impl> From for RouterBody { - fn from(value: T) -> Self { - RouterBody(value.into()) - } -} +// We create some utility functions to make Empty and Full bodies +// and convert types -impl Debug for RouterBody { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - self.0.fmt(f) - } +/// Create an empty RouterBody +pub(crate) fn empty() -> UnsyncBoxBody { + Empty::::new() + .map_err(|never| match never {}) + .boxed_unsync() } -impl Stream for RouterBody { - type Item = ::Item; - - fn poll_next( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - let mut pinned = std::pin::pin!(&mut self.0); - pinned.as_mut().poll_next(cx) - } +/// Create a Full RouterBody using the supplied chunk +pub(crate) fn from_bytes>(chunk: T) -> UnsyncBoxBody { + Full::new(chunk.into()) + .map_err(|never| match never {}) + .boxed_unsync() } -impl HttpBody for RouterBody { - type Data = ::Data; - - type Error = ::Error; - - fn poll_data( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll>> { - let mut pinned = std::pin::pin!(&mut self.0); - pinned.as_mut().poll_data(cx) - } - - fn poll_trailers( - mut self: std::pin::Pin<&mut Self>, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll, Self::Error>> { - let mut pinned = std::pin::pin!(&mut self.0); - pinned.as_mut().poll_trailers(cx) - } - - fn is_end_stream(&self) -> bool { - 
self.0.is_end_stream() - } - - fn size_hint(&self) -> SizeHint { - HttpBody::size_hint(&self.0) - } -} - -pub(crate) async fn get_body_bytes(body: B) -> Result { - hyper::body::to_bytes(body).await -} - -// this is used to wrap a hyper::Client because its Service implementation will always return a hyper::Body, -// it does not keep the body type used by the request -#[derive(Clone)] -pub(crate) struct RouterBodyConverter { - pub(crate) inner: C, +/// Create a streaming RouterBody using the supplied stream +pub(crate) fn from_result_stream(data_stream: S) -> RouterBody +where + S: Stream> + Send + 'static, + S: StreamExt, + E: Into, +{ + RouterBody::new(StreamBody::new( + data_stream.map(|s| s.map(Frame::data).map_err(AxumError::new)), + )) } -impl Service> for RouterBodyConverter +/// Get a body's contents as a utf-8 string for use in test assertions, or return an error. +#[cfg(test)] +pub(crate) async fn into_string(input: B) -> Result where - C: Service, Response = http::Response, Error = BoxError> - + Clone - + Send - + Sync - + 'static, - >>::Future: Send + Sync + 'static, + B: HttpBody, + B::Error: Into, { - type Response = http::Response; - - type Error = BoxError; - - type Future = BoxFuture<'static, Result, BoxError>>; - - fn poll_ready( - &mut self, - cx: &mut std::task::Context<'_>, - ) -> std::task::Poll> { - self.inner.poll_ready(cx) - } - - fn call(&mut self, req: http::Request) -> Self::Future { - Box::pin(self.inner.call(req).map(|res| { - res.map(|http_response| { - let (parts, body) = http_response.into_parts(); - http::Response::from_parts(parts, RouterBody::from(body)) - }) - })) - } + let bytes = input + .collect() + .await + .map_err(AxumError::new)? 
+ .to_bytes() + .to_vec(); + let string = String::from_utf8(bytes).map_err(AxumError::new)?; + Ok(string) } diff --git a/apollo-router/src/services/router/service.rs b/apollo-router/src/services/router/service.rs index acc67e332d..1743c7d33a 100644 --- a/apollo-router/src/services/router/service.rs +++ b/apollo-router/src/services/router/service.rs @@ -4,7 +4,6 @@ use std::collections::HashMap; use std::sync::Arc; use std::task::Poll; -use axum::body::StreamBody; use axum::response::*; use bytes::BufMut; use bytes::Bytes; @@ -12,7 +11,6 @@ use bytes::BytesMut; use futures::future::join_all; use futures::future::ready; use futures::future::BoxFuture; -use futures::stream; use futures::stream::once; use futures::stream::StreamExt; use futures::TryFutureExt; @@ -24,9 +22,10 @@ use http::HeaderName; use http::HeaderValue; use http::Method; use http::StatusCode; -use http_body::Body as _; use mime::APPLICATION_JSON; use multimap::MultiMap; +use opentelemetry::KeyValue; +use opentelemetry_semantic_conventions::trace::HTTP_REQUEST_METHOD; use tower::BoxError; use tower::Layer; use tower::ServiceBuilder; @@ -47,6 +46,14 @@ use crate::graphql; use crate::http_ext; #[cfg(test)] use crate::plugin::test::MockSupergraphService; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_BODY; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_HEADERS; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_URI; +use crate::plugins::telemetry::config_new::attributes::HTTP_REQUEST_VERSION; +use crate::plugins::telemetry::config_new::events::log_event; +use crate::plugins::telemetry::config_new::events::DisplayRouterRequest; +use crate::plugins::telemetry::config_new::events::DisplayRouterResponse; +use crate::plugins::telemetry::config_new::events::RouterResponseBodyExtensionType; use crate::protocols::multipart::Multipart; use crate::protocols::multipart::ProtocolMode; use crate::query_planner::InMemoryCachePlanner; @@ -59,8 +66,6 @@ use 
crate::services::layers::query_analysis::QueryAnalysisLayer; use crate::services::layers::static_page::StaticPageLayer; use crate::services::new_service::ServiceFactory; use crate::services::router; -use crate::services::router::body::get_body_bytes; -use crate::services::router::body::RouterBody; #[cfg(test)] use crate::services::supergraph; use crate::services::HasPlugins; @@ -269,6 +274,10 @@ impl RouterService { .extensions() .with_lock(|lock| lock.get().cloned()) .unwrap_or_default(); + let display_router_response: DisplayRouterResponse = context + .extensions() + .with_lock(|lock| lock.get().cloned()) + .unwrap_or_default(); let (mut parts, mut body) = response.into_parts(); process_vary_header(&mut parts.headers); @@ -287,10 +296,9 @@ impl RouterService { Ok(router::Response { response: http::Response::builder() .status(StatusCode::SERVICE_UNAVAILABLE) - .body( - RouterBody::from("router service is not available to process request") - .into_inner(), - ) + .body(router::body::from_bytes( + "router service is not available to process request", + )) .expect("cannot fail"), context, }) @@ -307,15 +315,22 @@ impl RouterService { parts .headers .insert(CONTENT_TYPE, APPLICATION_JSON_HEADER_VALUE.clone()); - tracing::trace_span!("serialize_response").in_scope(|| { - let body = serde_json::to_string(&response)?; - Ok(router::Response { - response: http::Response::from_parts( - parts, - RouterBody::from(body).into_inner(), - ), - context, - }) + let body: Result = tracing::trace_span!("serialize_response") + .in_scope(|| { + let body = serde_json::to_string(&response)?; + Ok(body) + }); + let body = body?; + + if display_router_response.0 { + context.extensions().with_lock(|mut ext| { + ext.insert(RouterResponseBodyExtensionType(body.clone())); + }); + } + + Ok(router::Response { + response: http::Response::from_parts(parts, router::body::from_bytes(body)), + context, }) } else if accepts_multipart_defer || accepts_multipart_subscription { if accepts_multipart_defer 
{ @@ -339,42 +354,22 @@ impl RouterService { ACCEL_BUFFERING_HEADER_NAME.clone(), ACCEL_BUFFERING_HEADER_VALUE.clone(), ); - let multipart_stream = match response.subscribed { - Some(true) => StreamBody::new(Multipart::new( - body.inspect(|response| { - if !response.errors.is_empty() { - Self::count_errors(&response.errors); - } - }), - ProtocolMode::Subscription, - )), - _ => StreamBody::new(Multipart::new( - once(ready(response)).chain(body.inspect(|response| { - if !response.errors.is_empty() { - Self::count_errors(&response.errors); - } - })), - ProtocolMode::Defer, - )), + let response = match response.subscribed { + Some(true) => http::Response::from_parts( + parts, + router::body::from_result_stream(Multipart::new( + body, + ProtocolMode::Subscription, + )), + ), + _ => http::Response::from_parts( + parts, + router::body::from_result_stream(Multipart::new( + once(ready(response)).chain(body), + ProtocolMode::Defer, + )), + ), }; - let response = (parts, multipart_stream).into_response().map(|body| { - // Axum makes this `body` have type: - // https://docs.rs/http-body/0.4.5/http_body/combinators/struct.UnsyncBoxBody.html - let mut body = Box::pin(body); - // We make a stream based on its `poll_data` method - // in order to create a `hyper::Body`. - RouterBody::wrap_stream(stream::poll_fn(move |ctx| { - body.as_mut().poll_data(ctx) - })) - .into_inner() - // … but we ignore the `poll_trailers` method: - // https://docs.rs/http-body/0.4.5/http_body/trait.Body.html#tymethod.poll_trailers - // Apparently HTTP/2 trailers are like headers, except after the response body. - // I (Simon) believe nothing in the Apollo Router uses trailers as of this writing, - // so ignoring `poll_trailers` is fine. - // If we want to use trailers, we may need remove this convertion to `hyper::Body` - // and return `UnsyncBoxBody` (a.k.a. `axum::BoxBody`) as-is. 
- }); Ok(RouterResponse { response, context }) } else { @@ -416,7 +411,7 @@ impl RouterService { async fn call_inner(&self, req: RouterRequest) -> Result { let context = req.context; let (parts, body) = req.router_request.into_parts(); - let requests = self.get_graphql_requests(&parts, body).await?; + let requests = self.get_graphql_requests(&context, &parts, body).await?; let (supergraph_requests, is_batch) = match futures::future::ready(requests) .and_then(|r| self.translate_request(&context, parts, r)) @@ -424,13 +419,6 @@ impl RouterService { { Ok(requests) => requests, Err(err) => { - u64_counter!( - "apollo_router_http_requests_total", - "Total number of HTTP requests made.", - 1, - status = err.status.as_u16() as i64, - error = err.error.to_string() - ); // Useful for selector in spans/instruments/events context .insert_json_value(CONTAINS_GRAPHQL_ERROR, serde_json_bytes::Value::Bool(true)); @@ -501,17 +489,19 @@ impl RouterService { let context = first.context; let mut bytes = BytesMut::new(); bytes.put_u8(b'['); - bytes.extend_from_slice(&get_body_bytes(body).await?); + bytes.extend_from_slice(&router::body::into_bytes(body).await?); for result in results_it { bytes.put(&b", "[..]); - bytes.extend_from_slice(&get_body_bytes(result.response.into_body()).await?); + bytes.extend_from_slice( + &router::body::into_bytes(result.response.into_body()).await?, + ); } bytes.put_u8(b']'); Ok(RouterResponse { response: http::Response::from_parts( parts, - RouterBody::from(bytes.freeze()).into_inner(), + router::body::from_bytes(bytes.freeze()), ), context, }) @@ -541,7 +531,6 @@ impl RouterService { result = graphql::Request::batch_from_urlencoded_query(q.to_string()) .map_err(|e| TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to decode a valid GraphQL request from path", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: format!( "failed to decode a valid GraphQL request from path {e}" @@ -550,7 +539,6 @@ impl RouterService { if 
result.is_empty() { return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to decode a valid GraphQL request from path", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: "failed to decode a valid GraphQL request from path: empty array ".to_string() }); @@ -565,14 +553,12 @@ impl RouterService { }; return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "batching not enabled", extension_code: "BATCHING_NOT_ENABLED", extension_details, }); } else { return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to decode a valid GraphQL request from path", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: format!( "failed to decode a valid GraphQL request from path {err}" @@ -585,7 +571,6 @@ impl RouterService { }).unwrap_or_else(|| { Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "There was no GraphQL operation to execute. Use the `query` parameter to send an operation, using either GET or POST.", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: "There was no GraphQL operation to execute. 
Use the `query` parameter to send an operation, using either GET or POST.".to_string() }) @@ -610,7 +595,6 @@ impl RouterService { result = graphql::Request::batch_from_bytes(bytes).map_err(|e| TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to deserialize the request body into JSON", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: format!( "failed to deserialize the request body into JSON: {e}" @@ -619,7 +603,6 @@ impl RouterService { if result.is_empty() { return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to decode a valid GraphQL request from path", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: "failed to decode a valid GraphQL request from path: empty array " @@ -637,14 +620,12 @@ impl RouterService { }; return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "batching not enabled", extension_code: "BATCHING_NOT_ENABLED", extension_details, }); } else { return Err(TranslateError { status: StatusCode::BAD_REQUEST, - error: "failed to deserialize the request body into JSON", extension_code: "INVALID_GRAPHQL_REQUEST", extension_details: format!( "failed to deserialize the request body into JSON: {err}" @@ -724,7 +705,6 @@ impl RouterService { Batch::query_for_index(shared_batch_details.clone(), index + 1).map_err( |err| TranslateError { status: StatusCode::INTERNAL_SERVER_ERROR, - error: "failed to create batch", extension_code: "BATCHING_ERROR", extension_details: format!("failed to create batch entry: {err}"), }, @@ -753,7 +733,6 @@ impl RouterService { let b_for_index = Batch::query_for_index(shared_batch_details, 0).map_err(|err| TranslateError { status: StatusCode::INTERNAL_SERVER_ERROR, - error: "failed to create batch", extension_code: "BATCHING_ERROR", extension_details: format!("failed to create batch entry: {err}"), })?; @@ -775,6 +754,7 @@ impl RouterService { async fn get_graphql_requests( &self, + context: &Context, parts: &Parts, body: Body, ) -> Result, bool), 
TranslateError>, BoxError> { @@ -782,9 +762,51 @@ impl RouterService { if parts.method == Method::GET { self.translate_query_request(parts).await } else { - let bytes = get_body_bytes(body) + let bytes = router::body::into_bytes(body) .instrument(tracing::debug_span!("receive_body")) .await?; + if let Some(level) = context + .extensions() + .with_lock(|ext| ext.get::().cloned()) + .map(|d| d.0) + { + let mut attrs = Vec::with_capacity(5); + #[cfg(test)] + let mut headers: indexmap::IndexMap = parts + .headers + .clone() + .into_iter() + .filter_map(|(name, val)| Some((name?.to_string(), val))) + .collect(); + #[cfg(test)] + headers.sort_keys(); + #[cfg(not(test))] + let headers = &parts.headers; + + attrs.push(KeyValue::new( + HTTP_REQUEST_HEADERS, + opentelemetry::Value::String(format!("{:?}", headers).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_METHOD, + opentelemetry::Value::String(format!("{}", parts.method).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_URI, + opentelemetry::Value::String(format!("{}", parts.uri).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_VERSION, + opentelemetry::Value::String(format!("{:?}", parts.version).into()), + )); + attrs.push(KeyValue::new( + HTTP_REQUEST_BODY, + opentelemetry::Value::String( + format!("{:?}", String::from_utf8_lossy(&bytes)).into(), + ), + )); + log_event(level, "router.request", attrs, ""); + } self.translate_bytes_request(&bytes) }; Ok(graphql_requests) @@ -822,7 +844,6 @@ impl RouterService { struct TranslateError<'a> { status: StatusCode, - error: &'a str, extension_code: &'a str, extension_details: String, } diff --git a/apollo-router/src/services/router/tests.rs b/apollo-router/src/services/router/tests.rs index 59737d44f3..0adf3cacb1 100644 --- a/apollo-router/src/services/router/tests.rs +++ b/apollo-router/src/services/router/tests.rs @@ -15,7 +15,6 @@ use tower_service::Service; use crate::graphql; use crate::services::router; -use 
crate::services::router::body::get_body_bytes; use crate::services::router::service::from_supergraph_mock_callback; use crate::services::router::service::process_vary_header; use crate::services::subgraph; @@ -204,7 +203,7 @@ async fn test_http_max_request_bytes() { in `apollo-router/src/services/supergraph.rs` has changed. \ Please update `CANNED_REQUEST_LEN` accordingly." ); - hyper::Body::from(json_bytes) + router::body::from_bytes(json_bytes) }); let config = serde_json::json!({ "limits": { @@ -259,8 +258,12 @@ async fn it_only_accepts_batch_http_link_mode_for_query_batch() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), http::StatusCode::BAD_REQUEST); - let data: serde_json::Value = - serde_json::from_slice(&get_body_bytes(response.into_body()).await.unwrap()).unwrap(); + let data: serde_json::Value = serde_json::from_slice( + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(expected_response, data); } @@ -294,7 +297,7 @@ async fn it_processes_a_valid_query_batch() { result.push(b','); result.append(&mut json_bytes_3); result.push(b']'); - hyper::Body::from(result) + router::body::from_bytes(result) }); let config = serde_json::json!({ "batching": { @@ -315,8 +318,12 @@ async fn it_processes_a_valid_query_batch() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), http::StatusCode::OK); - let data: serde_json::Value = - serde_json::from_slice(&get_body_bytes(response.into_body()).await.unwrap()).unwrap(); + let data: serde_json::Value = serde_json::from_slice( + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(expected_response, data); } @@ -349,8 +356,12 @@ async fn it_will_not_process_a_query_batch_without_enablement() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), http::StatusCode::BAD_REQUEST); - let data: serde_json::Value 
= - serde_json::from_slice(&get_body_bytes(response.into_body()).await.unwrap()).unwrap(); + let data: serde_json::Value = serde_json::from_slice( + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(expected_response, data); } @@ -374,7 +385,7 @@ async fn it_will_not_process_a_poorly_formatted_query_batch() { result.push(b','); result.append(&mut json_bytes); // Deliberately omit the required trailing ] - hyper::Body::from(result) + router::body::from_bytes(result) }); let config = serde_json::json!({ "batching": { @@ -395,8 +406,12 @@ async fn it_will_not_process_a_poorly_formatted_query_batch() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), http::StatusCode::BAD_REQUEST); - let data: serde_json::Value = - serde_json::from_slice(&get_body_bytes(response.into_body()).await.unwrap()).unwrap(); + let data: serde_json::Value = serde_json::from_slice( + &router::body::into_bytes(response.into_body()) + .await + .unwrap(), + ) + .unwrap(); assert_eq!(expected_response, data); } @@ -427,7 +442,7 @@ async fn it_will_process_a_non_batched_defered_query() { .supergraph_request .map(|req: graphql::Request| { let bytes = serde_json::to_vec(&req).unwrap(); - hyper::Body::from(bytes) + router::body::from_bytes(bytes) }); let config = serde_json::json!({ "batching": { @@ -448,7 +463,9 @@ async fn it_will_process_a_non_batched_defered_query() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), http::StatusCode::OK); - let bytes = get_body_bytes(response.into_body()).await.unwrap(); + let bytes = router::body::into_bytes(response.into_body()) + .await + .unwrap(); let data = String::from_utf8_lossy(&bytes); assert_eq!(expected_response, data); } @@ -501,7 +518,9 @@ async fn it_will_not_process_a_batched_deferred_query() { // Send a request let response = with_config().await.response; assert_eq!(response.status(), 
http::StatusCode::NOT_ACCEPTABLE); - let bytes = get_body_bytes(response.into_body()).await.unwrap(); + let bytes = router::body::into_bytes(response.into_body()) + .await + .unwrap(); let data = String::from_utf8_lossy(&bytes); assert_eq!(expected_response, data); } diff --git a/apollo-router/src/services/subgraph_service.rs b/apollo-router/src/services/subgraph_service.rs index 78574f9502..90f527d674 100644 --- a/apollo-router/src/services/subgraph_service.rs +++ b/apollo-router/src/services/subgraph_service.rs @@ -68,12 +68,11 @@ use crate::plugins::telemetry::config_new::events::log_event; use crate::plugins::telemetry::config_new::events::SubgraphEventRequest; use crate::plugins::telemetry::config_new::events::SubgraphEventResponse; use crate::plugins::telemetry::consts::SUBGRAPH_REQUEST_SPAN_NAME; -use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; -use crate::plugins::telemetry::LOGGING_DISPLAY_HEADERS; use crate::protocols::websocket::convert_websocket_stream; use crate::protocols::websocket::GraphqlWebSocket; use crate::query_planner::OperationKind; use crate::services::layers::apq; +use crate::services::router; use crate::services::SubgraphRequest; use crate::services::SubgraphResponse; use crate::Configuration; @@ -182,21 +181,26 @@ pub(crate) fn generate_tls_client_config( tls_cert_store: Option, client_cert_config: Option<&TlsClientAuth>, ) -> Result { - let tls_builder = rustls::ClientConfig::builder().with_safe_defaults(); + // Enable crypto + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let tls_builder = rustls::ClientConfig::builder(); Ok(match (tls_cert_store, client_cert_config) { - (None, None) => tls_builder.with_native_roots().with_no_client_auth(), + (None, None) => tls_builder.with_native_roots()?.with_no_client_auth(), (Some(store), None) => tls_builder .with_root_certificates(store) .with_no_client_auth(), - (None, Some(client_auth_config)) => tls_builder.with_native_roots().with_client_auth_cert( - 
client_auth_config.certificate_chain.clone(), - client_auth_config.key.clone(), - )?, + (None, Some(client_auth_config)) => { + tls_builder.with_native_roots()?.with_client_auth_cert( + client_auth_config.certificate_chain.clone(), + client_auth_config.key.clone_key(), + )? + } (Some(store), Some(client_auth_config)) => tls_builder .with_root_certificates(store) .with_client_auth_cert( client_auth_config.certificate_chain.clone(), - client_auth_config.key.clone(), + client_auth_config.key.clone_key(), )?, }) } @@ -555,9 +559,6 @@ async fn call_websocket( let request = get_websocket_request(service_name.clone(), parts, subgraph_cfg)?; - let display_headers = context.contains_key(LOGGING_DISPLAY_HEADERS); - let display_body = context.contains_key(LOGGING_DISPLAY_BODY); - let signing_params = context .extensions() .with_lock(|lock| lock.get::>().cloned()); @@ -604,14 +605,6 @@ async fn call_websocket( ); } - if display_headers { - tracing::info!(http.request.headers = ?request.headers(), apollo.subgraph.name = %service_name, "Websocket request headers to subgraph {service_name:?}"); - } - - if display_body { - tracing::info!(http.request.body = ?request.body(), apollo.subgraph.name = %service_name, "Websocket request body to subgraph {service_name:?}"); - } - let uri = request.uri(); let path = uri.path(); let host = uri.host().unwrap_or_default(); @@ -637,7 +630,7 @@ async fn call_websocket( "graphql.operation.name" = %operation_name, ); - let (ws_stream, mut resp) = match request.uri().scheme_str() { + let (ws_stream, resp) = match request.uri().scheme_str() { Some("wss") => { connect_async_tls_with_config(request, None, false, None) .instrument(subgraph_req_span) @@ -645,27 +638,11 @@ async fn call_websocket( } _ => connect_async(request).instrument(subgraph_req_span).await, } - .map_err(|err| { - if display_body || display_headers { - tracing::info!( - http.response.error = format!("{:?}", &err), apollo.subgraph.name = %service_name, "Websocket connection error 
from subgraph {service_name:?} received" - ); - } - FetchError::SubrequestWsError { - service: service_name.clone(), - reason: format!("cannot connect websocket to subgraph: {err}"), - } + .map_err(|err| FetchError::SubrequestWsError { + service: service_name.clone(), + reason: format!("cannot connect websocket to subgraph: {err}"), })?; - if display_headers { - tracing::info!(response.headers = ?resp.headers(), apollo.subgraph.name = %service_name, "Websocket response headers to subgraph {service_name:?}"); - } - if display_body { - tracing::info!( - response.body = %String::from_utf8_lossy(&resp.body_mut().take().unwrap_or_default()), apollo.subgraph.name = %service_name, "Websocket response body from subgraph {service_name:?} received" - ); - } - let gql_socket = GraphqlWebSocket::new( convert_websocket_stream(ws_stream, subscription_hash.clone()), subscription_hash, @@ -867,7 +844,6 @@ pub(crate) async fn process_batch( .expect("we have at least one context in the batch") .0 .clone(); - let display_body = batch_context.contains_key(LOGGING_DISPLAY_BODY); let client = client_factory.create(&service); // Update our batching metrics (just before we fetch) @@ -891,35 +867,34 @@ pub(crate) async fn process_batch( // Perform the actual fetch. If this fails then we didn't manage to make the call at all, so we can't do anything with it. 
tracing::debug!("fetching from subgraph: {service}"); - let (parts, content_type, body) = - match do_fetch(client, &batch_context, &service, request, display_body) - .instrument(subgraph_req_span) - .await - { - Ok(res) => res, - Err(err) => { - let resp = http::Response::builder() - .status(StatusCode::INTERNAL_SERVER_ERROR) - .body(err.to_graphql_error(None)) - .map_err(|err| FetchError::SubrequestHttpError { - status_code: None, - service: service.clone(), - reason: format!("cannot create the http response from error: {err:?}"), - })?; - let (parts, body) = resp.into_parts(); - let body = - serde_json::to_vec(&body).map_err(|err| FetchError::SubrequestHttpError { - status_code: None, - service: service.clone(), - reason: format!("cannot serialize the error: {err:?}"), - })?; - ( - parts, - Ok(ContentType::ApplicationJson), - Some(Ok(body.into())), - ) - } - }; + let (parts, content_type, body) = match do_fetch(client, &batch_context, &service, request) + .instrument(subgraph_req_span) + .await + { + Ok(res) => res, + Err(err) => { + let resp = http::Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body(err.to_graphql_error(None)) + .map_err(|err| FetchError::SubrequestHttpError { + status_code: None, + service: service.clone(), + reason: format!("cannot create the http response from error: {err:?}"), + })?; + let (parts, body) = resp.into_parts(); + let body = + serde_json::to_vec(&body).map_err(|err| FetchError::SubrequestHttpError { + status_code: None, + service: service.clone(), + reason: format!("cannot serialize the error: {err:?}"), + })?; + ( + parts, + Ok(ContentType::ApplicationJson), + Some(Ok(body.into())), + ) + } + }; let subgraph_response_event = batch_context .extensions() @@ -956,14 +931,6 @@ pub(crate) async fn process_batch( ); } - if display_body { - if let Some(Ok(b)) = &body { - tracing::info!( - response.body = %String::from_utf8_lossy(b), apollo.subgraph.name = %&service, "Raw response body from subgraph {service:?} 
received" - ); - } - } - tracing::debug!("parts: {parts:?}, content_type: {content_type:?}, body: {body:?}"); let value = serde_json::from_slice(&body.ok_or(FetchError::SubrequestMalformedResponse { @@ -1266,7 +1233,7 @@ pub(crate) async fn call_single_http( let (parts, _) = subgraph_request.into_parts(); let body = serde_json::to_string(&body)?; tracing::debug!("our JSON body: {body:?}"); - let mut request = http::Request::from_parts(parts, RouterBody::from(body)); + let mut request = http::Request::from_parts(parts, router::body::from_bytes(body)); request .headers_mut() @@ -1303,8 +1270,6 @@ pub(crate) async fn call_single_http( // 2. If an HTTP status is not 2xx it will always be attached as a graphql error. // 3. If the response type is `application/json` and status is not 2xx and the body the entire body will be output if the response is not valid graphql. - let display_body = context.contains_key(LOGGING_DISPLAY_BODY); - // TODO: Temporary solution to plug FileUploads plugin until 'http_client' will be fixed https://github.com/apollographql/router/pull/4666 let request = file_uploads::http_request_wrapper(request).await; @@ -1340,35 +1305,26 @@ pub(crate) async fn call_single_http( } // Perform the actual fetch. If this fails then we didn't manage to make the call at all, so we can't do anything with it. 
- let (parts, content_type, body) = - match do_fetch(client, &context, service_name, request, display_body) - .instrument(subgraph_req_span) - .await - { - Ok(resp) => resp, - Err(err) => { - return Ok(SubgraphResponse::builder() - .subgraph_name(service_name.to_string()) - .error(err.to_graphql_error(None)) - .status_code(StatusCode::INTERNAL_SERVER_ERROR) - .context(context) - .extensions(Object::default()) - .build()); - } - }; + let (parts, content_type, body) = match do_fetch(client, &context, service_name, request) + .instrument(subgraph_req_span) + .await + { + Ok(resp) => resp, + Err(err) => { + return Ok(SubgraphResponse::builder() + .subgraph_name(service_name.to_string()) + .error(err.to_graphql_error(None)) + .status_code(StatusCode::INTERNAL_SERVER_ERROR) + .context(context) + .extensions(Object::default()) + .build()); + } + }; let subgraph_response_event = context .extensions() .with_lock(|lock| lock.get::().cloned()); - if display_body { - if let Some(Ok(b)) = &body { - tracing::info!( - response.body = %String::from_utf8_lossy(b), apollo.subgraph.name = %service_name, "Raw response body from subgraph {service_name:?} received" - ); - } - } - if let Some(subgraph_response_event) = subgraph_response_event { let mut should_log = true; if let Some(condition) = subgraph_response_event.0.condition() { @@ -1487,7 +1443,6 @@ async fn do_fetch( context: &Context, service_name: &str, request: Request, - display_body: bool, ) -> Result< ( Parts, @@ -1517,8 +1472,7 @@ async fn do_fetch( let content_type = get_graphql_content_type(service_name, &parts); let body = if content_type.is_ok() { - let body = body - .to_bytes() + let body = router::body::into_bytes(body) .instrument(tracing::debug_span!("aggregate_response_data")) .await .map_err(|err| { @@ -1529,34 +1483,8 @@ async fn do_fetch( reason: err.to_string(), } }); - if let Ok(body) = &body { - if display_body { - tracing::info!( - http.response.body = %String::from_utf8_lossy(body), apollo.subgraph.name = 
%service_name, "Raw response body from subgraph {service_name:?} received" - ); - } - } Some(body) } else { - if display_body { - let body = body - .to_bytes() - .instrument(tracing::debug_span!("aggregate_response_data")) - .await - .map_err(|err| { - tracing::error!(fetch_error = ?err); - FetchError::SubrequestHttpError { - status_code: Some(parts.status.as_u16()), - service: service_name.to_string(), - reason: err.to_string(), - } - }); - if let Ok(body) = &body { - tracing::info!( - http.response.body = %String::from_utf8_lossy(body), apollo.subgraph.name = %service_name, "Raw response body from subgraph {service_name:?} received" - ); - } - } None }; Ok((parts, content_type, body)) @@ -1597,7 +1525,12 @@ fn get_websocket_request( })?, None => subgraph_url, }; - let mut request = subgraph_url.into_client_request().map_err(|err| { + // XXX During hyper upgrade, observed that we had lost the implementation for Url + // so I made the expedient decision to get a string representation (as_str()) + // for the creation of the client request. This works fine, but I'm not sure + // why we need to do it, because into_client_request **should** be implemented + // for Url... 
+ let mut request = subgraph_url.as_str().into_client_request().map_err(|err| { tracing::error!("cannot create websocket client request: {err:?}"); FetchError::SubrequestWsError { @@ -1695,28 +1628,25 @@ where mod tests { use std::convert::Infallible; use std::net::SocketAddr; - use std::net::TcpListener; use std::str::FromStr; + use axum::body::Body; use axum::extract::ws::Message; use axum::extract::ConnectInfo; use axum::extract::WebSocketUpgrade; use axum::response::IntoResponse; use axum::routing::get; use axum::Router; - use axum::Server; use bytes::Buf; use futures::StreamExt; use http::header::HOST; use http::StatusCode; use http::Uri; - use hyper::service::make_service_fn; - use hyper::Body; use serde_json_bytes::ByteString; use serde_json_bytes::Value; + use tokio::net::TcpListener; use tokio::sync::mpsc; use tokio_stream::wrappers::ReceiverStream; - use tower::service_fn; use tower::ServiceExt; use url::Url; use SubgraphRequest; @@ -1733,9 +1663,42 @@ mod tests { use crate::protocols::websocket::ServerMessage; use crate::protocols::websocket::WebSocketProtocol; use crate::query_planner::fetch::OperationKind; - use crate::services::router::body::get_body_bytes; + use crate::services::router; use crate::Context; + async fn serve(listener: TcpListener, handle: Handler) -> std::io::Result<()> + where + Handler: (Fn(http::Request) -> Fut) + Clone + Sync + Send + 'static, + Fut: + std::future::Future, Infallible>> + Send + 'static, + { + use hyper::body::Incoming; + use hyper_util::rt::TokioExecutor; + use hyper_util::rt::TokioIo; + + // Not sure this is the *right* place to do it, because it's actually clients that + // use crypto, not the server. + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + loop { + let (stream, _) = listener.accept().await?; + let io = TokioIo::new(stream); + let handle = handle.clone(); + tokio::spawn(async move { + // N.B. 
should use hyper service_fn here, since it's required to be implemented hyper Service trait! + let svc = hyper::service::service_fn(|request: http::Request| { + handle(request.map(Body::new)) + }); + if let Err(err) = hyper_util::server::conn::auto::Builder::new(TokioExecutor::new()) + .serve_connection_with_upgrades(io, svc) + .await + { + eprintln!("server error: {}", err); + } + }); + } + } + // starts a local server emulating a subgraph returning status code 400 async fn emulate_subgraph_bad_request(listener: TcpListener) { async fn handle(_request: http::Request) -> Result, Infallible> { @@ -1756,9 +1719,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning status code 401 @@ -1771,9 +1732,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning connection closed @@ -1783,8 +1742,10 @@ mod tests { panic!("test") } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); + let server = axum::serve( + listener, + Router::new().route("/", axum::routing::any_service(tower::service_fn(handle))), + ); server.await.unwrap(); } @@ -1798,9 +1759,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning 
bad response format @@ -1815,9 +1774,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning bad response format @@ -1832,9 +1789,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning bad response format @@ -1847,9 +1802,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning bad response format @@ -1862,9 +1815,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with missing content_type @@ -1876,9 +1827,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with invalid content_type @@ -1891,9 +1840,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = 
Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning unsupported content_type @@ -1906,9 +1853,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with @@ -1916,7 +1861,7 @@ mod tests { async fn emulate_persisted_query_not_supported_message(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -1961,9 +1906,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with @@ -1971,7 +1914,7 @@ mod tests { async fn emulate_persisted_query_not_supported_extension_code(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2018,9 +1961,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + 
serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with @@ -2028,7 +1969,7 @@ mod tests { async fn emulate_persisted_query_not_found_message(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2078,9 +2019,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning response with @@ -2088,7 +2027,7 @@ mod tests { async fn emulate_persisted_query_not_found_extension_code(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2138,9 +2077,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning a response to request with apq @@ -2148,7 +2085,7 @@ mod tests { async fn emulate_expected_apq_enabled_configuration(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) 
.await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2179,9 +2116,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } // starts a local server emulating a subgraph returning a response to request without apq @@ -2189,7 +2124,7 @@ mod tests { async fn emulate_expected_apq_disabled_configuration(listener: TcpListener) { async fn handle(request: http::Request) -> Result, Infallible> { let (_, body) = request.into_parts(); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2222,9 +2157,7 @@ mod tests { } } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } async fn emulate_correct_websocket_server(listener: TcpListener) { @@ -2234,13 +2167,13 @@ mod tests { ) -> Result { // finalize the upgrade process by returning upgrade callback. // we can customize the callback by sending additional info such as address. - let res = ws.on_upgrade(move |mut socket| async move { + let res = ws.protocols(["graphql-transport-ws"]).on_upgrade(move |mut socket| async move { let connection_ack = socket.recv().await.unwrap().unwrap().into_text().unwrap(); let ack_msg: ClientMessage = serde_json::from_str(&connection_ack).unwrap(); assert!(matches!(ack_msg, ClientMessage::ConnectionInit { .. 
})); socket - .send(Message::Text( + .send(Message::text( serde_json::to_string(&ServerMessage::ConnectionAck).unwrap(), )) .await @@ -2262,7 +2195,7 @@ mod tests { }; socket - .send(Message::Text( + .send(Message::text( serde_json::to_string(&ServerMessage::Next { id: client_id, payload: graphql::Response::builder().data(serde_json_bytes::json!({"userWasCreated": {"username": "ada_lovelace"}})).build() }).unwrap(), )) .await @@ -2273,9 +2206,10 @@ mod tests { } let app = Router::new().route("/ws", get(ws_handler)); - let server = Server::from_tcp(listener) - .unwrap() - .serve(app.into_make_service_with_connect_info::()); + let server = axum::serve( + listener, + app.into_make_service_with_connect_info::(), + ); server.await.unwrap(); } @@ -2288,9 +2222,10 @@ mod tests { } let app = Router::new().route("/ws", get(ws_handler)); - let server = Server::from_tcp(listener) - .unwrap() - .serve(app.into_make_service_with_connect_info::()); + let server = axum::serve( + listener, + app.into_make_service_with_connect_info::(), + ); server.await.unwrap(); } @@ -2302,7 +2237,7 @@ mod tests { .get_all(ACCEPT) .iter() .any(|header_value| header_value == CALLBACK_PROTOCOL_ACCEPT)); - let graphql_request: Result = get_body_bytes(body) + let graphql_request: Result = router::body::into_bytes(body) .await .map_err(|_| ()) .and_then(|bytes| serde_json::from_reader(bytes.reader()).map_err(|_| ())) @@ -2337,9 +2272,7 @@ mod tests { .unwrap()) } - let make_svc = make_service_fn(|_conn| async { Ok::<_, Infallible>(service_fn(handle)) }); - let server = Server::from_tcp(listener).unwrap().serve(make_svc); - server.await.unwrap(); + serve(listener, handle).await.unwrap(); } fn subscription_config() -> SubscriptionConfig { @@ -2394,7 +2327,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_callback() { let _ = SUBSCRIPTION_CALLBACK_HMAC_KEY.set(String::from("TESTEST")); - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let 
listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); let spawned_task = tokio::task::spawn(emulate_subgraph_with_callback_data(listener)); let subgraph_service = SubgraphService::new( @@ -2438,7 +2371,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_content_type_application_graphql() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_application_graphql_response(listener)); let subgraph_service = SubgraphService::new( @@ -2472,7 +2405,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_content_type_application_json() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_application_json_response(listener)); let subgraph_service = SubgraphService::new( @@ -2507,7 +2440,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] #[cfg(not(target_os = "macos"))] async fn test_subgraph_service_panic() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_panic(listener)); let subgraph_service = SubgraphService::new( @@ -2545,7 +2478,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_invalid_response() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_ok_status_invalid_response(listener)); let subgraph_service = SubgraphService::new( 
@@ -2582,7 +2515,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_invalid_status_invalid_response_application_json() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn( emulate_subgraph_invalid_response_invalid_status_application_json(listener), @@ -2625,7 +2558,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_invalid_status_invalid_response_application_graphql() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn( emulate_subgraph_invalid_response_invalid_status_application_graphql(listener), @@ -2668,7 +2601,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_websocket() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); let spawned_task = tokio::task::spawn(emulate_correct_websocket_server(listener)); let subgraph_service = SubgraphService::new( @@ -2721,7 +2654,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_service_websocket_with_error() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_incorrect_websocket_server(listener)); let subgraph_service = SubgraphService::new( @@ -2765,7 +2698,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_bad_status_code_should_not_fail() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let 
socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_bad_request(listener)); let subgraph_service = SubgraphService::new( @@ -2806,7 +2739,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_missing_content_type() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_missing_content_type(listener)); @@ -2844,7 +2777,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_invalid_content_type() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_invalid_content_type(listener)); @@ -2882,7 +2815,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_unsupported_content_type() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_unsupported_content_type(listener)); @@ -2920,7 +2853,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_unauthorized() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_subgraph_unauthorized(listener)); let subgraph_service = SubgraphService::new( @@ -2957,7 +2890,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_persisted_query_not_supported_message() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); 
tokio::task::spawn(emulate_persisted_query_not_supported_message(listener)); let subgraph_service = SubgraphService::new( @@ -3001,7 +2934,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_persisted_query_not_supported_extension_code() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_persisted_query_not_supported_extension_code( listener, @@ -3047,7 +2980,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_persisted_query_not_found_message() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_persisted_query_not_found_message(listener)); let subgraph_service = SubgraphService::new( @@ -3088,7 +3021,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_persisted_query_not_found_extension_code() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_persisted_query_not_found_extension_code(listener)); let subgraph_service = SubgraphService::new( @@ -3129,7 +3062,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_apq_enabled_subgraph_configuration() { - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_expected_apq_enabled_configuration(listener)); let subgraph_service = SubgraphService::new( @@ -3170,7 +3103,7 @@ mod tests { #[tokio::test(flavor = "multi_thread")] async fn test_apq_disabled_subgraph_configuration() { - let listener = 
std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").await.unwrap(); let socket_addr = listener.local_addr().unwrap(); tokio::task::spawn(emulate_expected_apq_disabled_configuration(listener)); let subgraph_service = SubgraphService::new( diff --git a/apollo-router/src/services/supergraph/service.rs b/apollo-router/src/services/supergraph/service.rs index ac5fbd1295..2c01813446 100644 --- a/apollo-router/src/services/supergraph/service.rs +++ b/apollo-router/src/services/supergraph/service.rs @@ -34,12 +34,15 @@ use crate::graphql; use crate::graphql::IntoGraphQLErrors; use crate::graphql::Response; use crate::plugin::DynPlugin; +use crate::plugins::connectors::query_plans::store_connectors; +use crate::plugins::connectors::query_plans::store_connectors_labels; +use crate::plugins::subscription::Subscription; use crate::plugins::subscription::SubscriptionConfig; +use crate::plugins::subscription::APOLLO_SUBSCRIPTION_PLUGIN; use crate::plugins::telemetry::config_new::events::log_event; use crate::plugins::telemetry::config_new::events::SupergraphEventResponse; use crate::plugins::telemetry::consts::QUERY_PLANNING_SPAN_NAME; use crate::plugins::telemetry::tracing::apollo_telemetry::APOLLO_PRIVATE_DURATION_NS; -use crate::plugins::telemetry::LOGGING_DISPLAY_BODY; use crate::plugins::traffic_shaping::TrafficShaping; use crate::plugins::traffic_shaping::APOLLO_TRAFFIC_SHAPING; use crate::query_planner::subscription::SubscriptionHandle; @@ -48,9 +51,13 @@ use crate::query_planner::subscription::SUBSCRIPTION_EVENT_SPAN_NAME; use crate::query_planner::CachingQueryPlanner; use crate::query_planner::InMemoryCachePlanner; use crate::query_planner::QueryPlannerService; +use crate::router_factory::create_http_services; use crate::router_factory::create_plugins; use crate::router_factory::create_subgraph_services; +use crate::services::connector_service::ConnectorServiceFactory; use crate::services::execution::QueryPlan; +use 
crate::services::fetch_service::FetchServiceFactory; +use crate::services::http::HttpClientServiceFactory; use crate::services::layers::allow_only_http_post_mutations::AllowOnlyHttpPostMutationsLayer; use crate::services::layers::content_negotiation; use crate::services::layers::persisted_queries::PersistedQueryLayer; @@ -60,13 +67,13 @@ use crate::services::query_planner; use crate::services::router::ClientRequestAccepts; use crate::services::subgraph::BoxGqlStream; use crate::services::subgraph_service::MakeSubgraphService; -use crate::services::subgraph_service::SubgraphServiceFactory; use crate::services::supergraph; use crate::services::ExecutionRequest; use crate::services::ExecutionResponse; use crate::services::ExecutionServiceFactory; use crate::services::QueryPlannerContent; use crate::services::QueryPlannerResponse; +use crate::services::SubgraphServiceFactory; use crate::services::SupergraphRequest; use crate::services::SupergraphResponse; use crate::spec::operation_limits::OperationLimits; @@ -119,6 +126,11 @@ impl Service for SupergraphService { } fn call(&mut self, req: SupergraphRequest) -> Self::Future { + if let Some(connectors) = &self.schema.connectors { + store_connectors_labels(&req.context, connectors.labels_by_service_name.clone()); + store_connectors(&req.context, connectors.by_service_name.clone()); + } + // Consume our cloned services and allow ownership to be transferred to the async block. 
let clone = self.query_planner_service.clone(); @@ -419,7 +431,6 @@ pub struct SubscriptionTaskParams { pub(crate) subscription_handle: SubscriptionHandle, pub(crate) subscription_config: SubscriptionConfig, pub(crate) stream_rx: ReceiverStream, - pub(crate) service_name: String, } async fn subscription_task( @@ -438,7 +449,6 @@ async fn subscription_task( }; let subscription_config = sub_params.subscription_config; let subscription_handle = sub_params.subscription_handle; - let service_name = sub_params.service_name; let mut receiver = sub_params.stream_rx; let sender = sub_params.client_sender; @@ -486,7 +496,6 @@ async fn subscription_task( .ok() .flatten() .unwrap_or_default(); - let display_body = context.contains_key(LOGGING_DISPLAY_BODY); let mut receiver = match receiver.next().await { Some(receiver) => receiver, @@ -530,9 +539,6 @@ async fn subscription_task( message = receiver.next() => { match message { Some(mut val) => { - if display_body { - tracing::info!(http.request.body = ?val, apollo.subgraph.name = %service_name, "Subscription event body from subgraph {service_name:?}"); - } val.created_at = Some(Instant::now()); let res = dispatch_event(&supergraph_req, &execution_service_factory, query_plan.as_ref(), context.clone(), val, sender.clone()) .instrument(tracing::info_span!(SUBSCRIPTION_EVENT_SPAN_NAME, @@ -567,7 +573,14 @@ async fn subscription_task( break; }, }; - let subgraph_services = match create_subgraph_services(&plugins, &execution_service_factory.schema, &conf).await { + let http_service_factory = match create_http_services(&plugins, &execution_service_factory.schema, &conf).await { + Ok(http_service_factory) => http_service_factory, + Err(err) => { + tracing::error!("cannot re-create subgraph service with the new configuration (closing existing subscription): {err:?}"); + break; + }, }; + + let subgraph_services = match create_subgraph_services(&http_service_factory, &plugins, &conf).await { Ok(subgraph_services) => subgraph_services, 
Err(err) => { tracing::error!("cannot re-create subgraph service with the new configuration (closing existing subscription): {err:?}"); @@ -575,11 +588,42 @@ async fn subscription_task( }, }; + + let subscription_plugin_conf = execution_service_factory + .plugins + .iter() + .find(|i| i.0.as_str() == APOLLO_SUBSCRIPTION_PLUGIN) + .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) + .map(|p| p.config.clone()); + + let fetch_service_factory = Arc::new(FetchServiceFactory::new( + execution_service_factory.schema.clone(), + execution_service_factory.subgraph_schemas.clone(), + Arc::new(SubgraphServiceFactory::new( + subgraph_services.into_iter().map(|(k, v)| (k, Arc::new(v) as Arc)).collect(), + execution_service_factory.plugins.clone(), + )), + subscription_plugin_conf.clone(), + // TODO: HTTP SERVICE + CONNECTORS + Arc::new(ConnectorServiceFactory::new( + execution_service_factory.schema.clone(), + execution_service_factory.subgraph_schemas.clone(), + Arc::new(http_service_factory), + subscription_plugin_conf, + execution_service_factory.schema + .connectors.as_ref().map(|c| c.by_service_name.clone()) + .unwrap_or_default(), + )), + ), + + ); + + execution_service_factory = ExecutionServiceFactory { schema: execution_service_factory.schema.clone(), subgraph_schemas: execution_service_factory.subgraph_schemas.clone(), plugins: plugins.clone(), - subgraph_service_factory: Arc::new(SubgraphServiceFactory::new(subgraph_services.into_iter().map(|(k, v)| (k, Arc::new(v) as Arc)).collect(), plugins.clone())), + fetch_service_factory, }; } @@ -753,6 +797,7 @@ fn clone_supergraph_request( pub(crate) struct PluggableSupergraphServiceBuilder { plugins: Arc, subgraph_services: Vec<(String, Box)>, + http_service_factory: IndexMap, configuration: Option>, planner: QueryPlannerService, } @@ -762,6 +807,7 @@ impl PluggableSupergraphServiceBuilder { Self { plugins: Arc::new(Default::default()), subgraph_services: Default::default(), + http_service_factory: Default::default(), 
configuration: None, planner, } @@ -788,6 +834,14 @@ impl PluggableSupergraphServiceBuilder { self } + pub(crate) fn with_http_service_factory( + mut self, + http_service_factory: IndexMap, + ) -> PluggableSupergraphServiceBuilder { + self.http_service_factory = http_service_factory; + self + } + pub(crate) fn with_configuration( mut self, configuration: Arc, @@ -805,7 +859,7 @@ impl PluggableSupergraphServiceBuilder { let query_planner_service = CachingQueryPlanner::new( self.planner, schema.clone(), - subgraph_schemas, + subgraph_schemas.clone(), &configuration, IndexMap::default(), ) @@ -821,17 +875,41 @@ impl PluggableSupergraphServiceBuilder { // For now just shoe-horn something in, but if we ever reintroduce the query planner hook in plugins and activate then this can be made clean. query_planner_service.activate(); - let subgraph_service_factory = Arc::new(SubgraphServiceFactory::new( - self.subgraph_services - .into_iter() - .map(|(name, service)| (name, service.into())) - .collect(), - self.plugins.clone(), + let subscription_plugin_conf = self + .plugins + .iter() + .find(|i| i.0.as_str() == APOLLO_SUBSCRIPTION_PLUGIN) + .and_then(|plugin| (*plugin.1).as_any().downcast_ref::()) + .map(|p| p.config.clone()); + + let fetch_service_factory = Arc::new(FetchServiceFactory::new( + schema.clone(), + subgraph_schemas.clone(), + Arc::new(SubgraphServiceFactory::new( + self.subgraph_services + .into_iter() + .map(|(name, service)| (name, service.into())) + .collect(), + self.plugins.clone(), + )), + subscription_plugin_conf.clone(), + // TODO: HTTP SERVICE + CONNECTORS + Arc::new(ConnectorServiceFactory::new( + schema.clone(), + subgraph_schemas, + Arc::new(self.http_service_factory), + subscription_plugin_conf, + schema + .connectors + .as_ref() + .map(|c| c.by_service_name.clone()) + .unwrap_or_default(), + )), )); Ok(SupergraphCreator { query_planner_service, - subgraph_service_factory, + fetch_service_factory, schema, plugins: self.plugins, config: 
configuration, @@ -843,7 +921,7 @@ impl PluggableSupergraphServiceBuilder { #[derive(Clone)] pub(crate) struct SupergraphCreator { query_planner_service: CachingQueryPlanner, - subgraph_service_factory: Arc, + fetch_service_factory: Arc, schema: Arc, config: Arc, plugins: Arc, @@ -901,7 +979,7 @@ impl SupergraphCreator { schema: self.schema.clone(), subgraph_schemas: self.query_planner_service.subgraph_schemas(), plugins: self.plugins.clone(), - subgraph_service_factory: self.subgraph_service_factory.clone(), + fetch_service_factory: self.fetch_service_factory.clone(), }) .schema(self.schema.clone()) .notify(self.config.notify.clone()) diff --git a/apollo-router/src/services/transport.rs b/apollo-router/src/services/transport.rs deleted file mode 100644 index b1e1fbccb1..0000000000 --- a/apollo-router/src/services/transport.rs +++ /dev/null @@ -1,15 +0,0 @@ -#![allow(deprecated)] -#![allow(missing_docs)] - -use tower::BoxError; - -#[deprecated = "use `apollo_router::services::router::Request` instead"] -pub type Request = http::Request; -#[deprecated = "use `apollo_router::services::router::Response` instead"] -pub type Response = http::Response; -#[deprecated = "use `apollo_router::services::router::BoxService` instead"] -pub type BoxService = tower::util::BoxService; -#[deprecated = "use `apollo_router::services::router::BoxCloneService` instead"] -pub type BoxCloneService = tower::util::BoxCloneService; -#[deprecated = "use `apollo_router::services::router::ServiceResult` instead"] -pub type ServiceResult = Result; diff --git a/apollo-router/src/spec/schema.rs b/apollo-router/src/spec/schema.rs index 8bfda05e64..f4e79b5ff2 100644 --- a/apollo-router/src/spec/schema.rs +++ b/apollo-router/src/spec/schema.rs @@ -9,6 +9,9 @@ use apollo_compiler::schema::Implementers; use apollo_compiler::validation::Valid; use apollo_compiler::Name; use apollo_federation::schema::ValidFederationSchema; +use apollo_federation::sources::connect::expand::expand_connectors; +use 
apollo_federation::sources::connect::expand::Connectors; +use apollo_federation::sources::connect::expand::ExpansionResult; use apollo_federation::ApiSchemaOptions; use apollo_federation::Supergraph; use http::Uri; @@ -19,6 +22,7 @@ use sha2::Sha256; use crate::error::ParseErrors; use crate::error::SchemaError; +use crate::plugins::connectors::configuration::apply_config; use crate::query_planner::OperationKind; use crate::uplink::schema::SchemaState; use crate::Configuration; @@ -31,6 +35,7 @@ pub(crate) struct Schema { pub(crate) implementers_map: apollo_compiler::collections::HashMap, api_schema: ApiSchema, pub(crate) schema_id: Arc, + pub(crate) connectors: Option, pub(crate) launch_id: Option>, } @@ -48,7 +53,33 @@ impl Schema { config: &Configuration, ) -> Result { let start = Instant::now(); + + let api_schema_options = ApiSchemaOptions { + include_defer: config.supergraph.defer_support, + ..Default::default() + }; + + let expansion = + expand_connectors(&raw_sdl.sdl, &api_schema_options).map_err(SchemaError::Connector)?; + let preserved_launch_id = raw_sdl.launch_id.clone(); + let (raw_sdl, api_schema, connectors) = match expansion { + ExpansionResult::Expanded { + raw_sdl, + api_schema: api, + connectors, + } => ( + Arc::new(SchemaState { + sdl: raw_sdl, + launch_id: preserved_launch_id, + }), + Some(ValidFederationSchema::new(*api).map_err(SchemaError::Connector)?), + Some(apply_config(config, connectors)), + ), + ExpansionResult::Unchanged => (raw_sdl, None, None), + }; + let mut parser = apollo_compiler::parser::Parser::new(); + let result = parser.parse_ast(&raw_sdl.sdl, "schema.graphql"); // Trace log recursion limit data @@ -67,7 +98,7 @@ impl Schema { let mut subgraphs = HashMap::new(); // TODO: error if not found? 
if let Some(join_enum) = definitions.get_enum("join__Graph") { - for (name, url) in join_enum.values.iter().filter_map(|(_name, value)| { + for (name, url) in join_enum.values.values().filter_map(|value| { let join_directive = value.directives.get("join__graph")?; let name = join_directive .specified_argument_by_name("name")? @@ -75,25 +106,34 @@ impl Schema { let url = join_directive.specified_argument_by_name("url")?.as_str()?; Some((name, url)) }) { - if url.is_empty() { - return Err(SchemaError::MissingSubgraphUrl(name.to_string())); - } - #[cfg(unix)] - // there is no standard for unix socket URLs apparently - let url = if let Some(path) = url.strip_prefix("unix://") { - // there is no specified format for unix socket URLs (cf https://github.com/whatwg/url/issues/577) - // so a unix:// URL will not be parsed by http::Uri - // To fix that, hyperlocal came up with its own Uri type that can be converted to http::Uri. - // It hides the socket path in a hex encoded authority that the unix socket connector will - // know how to decode - hyperlocal::Uri::new(path, "/").into() + let is_connector = connectors + .as_ref() + .map(|connectors| connectors.by_service_name.contains_key(name)) + .unwrap_or_default(); + + let url = if is_connector { + Uri::from_static("http://unused") } else { + if url.is_empty() { + return Err(SchemaError::MissingSubgraphUrl(name.to_string())); + } + #[cfg(unix)] + // there is no standard for unix socket URLs apparently + if let Some(path) = url.strip_prefix("unix://") { + // there is no specified format for unix socket URLs (cf https://github.com/whatwg/url/issues/577) + // so a unix:// URL will not be parsed by http::Uri + // To fix that, hyperlocal came up with its own Uri type that can be converted to http::Uri. 
+ // It hides the socket path in a hex encoded authority that the unix socket connector will + // know how to decode + hyperlocal::Uri::new(path, "/").into() + } else { + Uri::from_str(url) + .map_err(|err| SchemaError::UrlParse(name.to_string(), err))? + } + #[cfg(not(unix))] Uri::from_str(url) .map_err(|err| SchemaError::UrlParse(name.to_string(), err))? }; - #[cfg(not(unix))] - let url = Uri::from_str(url) - .map_err(|err| SchemaError::UrlParse(name.to_string(), err))?; if subgraphs.insert(name.to_string(), url).is_some() { return Err(SchemaError::Api(format!( @@ -114,16 +154,13 @@ impl Schema { let schema_id = Arc::new(Schema::schema_id(&raw_sdl.sdl)); - let api_schema = supergraph - .to_api_schema(ApiSchemaOptions { - include_defer: config.supergraph.defer_support, - ..Default::default() - }) - .map_err(|e| { + let api_schema = api_schema.map(Ok).unwrap_or_else(|| { + supergraph.to_api_schema(api_schema_options).map_err(|e| { SchemaError::Api(format!( "The supergraph schema failed to produce a valid API schema: {e}" )) - })?; + }) + })?; Ok(Schema { launch_id: raw_sdl @@ -137,6 +174,7 @@ impl Schema { implementers_map, api_schema: ApiSchema(api_schema), schema_id, + connectors, }) } @@ -343,8 +381,9 @@ impl std::fmt::Debug for Schema { subgraphs, implementers_map, api_schema: _, // skip - schema_id: _, // skip - launch_id: _, // skip + schema_id: _, + connectors: _, + launch_id: _, // skip } = self; f.debug_struct("Schema") .field("raw_sdl", raw_sdl) @@ -414,7 +453,7 @@ mod tests { type Baz { me: String } - + union UnionType2 = Foo | Bar "#, ); diff --git a/apollo-router/src/test_harness.rs b/apollo-router/src/test_harness.rs index 2eb49be5f2..b81d89bcb4 100644 --- a/apollo-router/src/test_harness.rs +++ b/apollo-router/src/test_harness.rs @@ -43,6 +43,9 @@ pub mod mocks; #[cfg(test)] pub(crate) mod http_client; +#[cfg(any(test, feature = "snapshot"))] +pub(crate) mod http_snapshot; + /// Builder for the part of an Apollo Router that handles GraphQL requests, 
as a [`tower::Service`]. /// /// This allows tests, benchmarks, etc @@ -385,7 +388,7 @@ impl<'a> TestHarness<'a> { #[cfg(test)] pub(crate) type HttpService = tower::util::BoxService< http::Request, - http::Response, + http::Response, std::convert::Infallible, >; @@ -548,6 +551,6 @@ pub fn make_fake_batch( result.push(b','); result.append(&mut json_bytes_new_req); result.push(b']'); - crate::services::router::Body::from(result) + router::body::from_bytes(result) }) } diff --git a/apollo-router/src/test_harness/http_client.rs b/apollo-router/src/test_harness/http_client.rs index 3486ee079b..7af9b782f9 100644 --- a/apollo-router/src/test_harness/http_client.rs +++ b/apollo-router/src/test_harness/http_client.rs @@ -3,13 +3,11 @@ use std::pin::Pin; use std::task::Poll; use async_compression::tokio::bufread::BrotliDecoder; -use axum::body::BoxBody; -use futures::stream::poll_fn; +use axum::body::Body; use futures::Future; use futures::Stream; use futures::StreamExt; use http::HeaderValue; -use http_body::Body; use mediatype::MediaType; use mediatype::ReadParams; use mime::APPLICATION_JSON; @@ -20,7 +18,11 @@ use tower::BoxError; use tower::Service; use tower::ServiceBuilder; +use crate::services::router; +use crate::services::router::body::RouterBody; + /// Added by `response_decompression` to `http::Response::extensions` +#[derive(Clone)] pub(crate) struct ResponseBodyWasCompressed(pub(crate) bool); pub(crate) enum MaybeMultipart { @@ -53,7 +55,7 @@ pub(crate) fn response_decompression( > where InnerService: - Service, Response = http::Response, Error = BoxError>, + Service, Response = http::Response, Error = BoxError>, { ServiceBuilder::new() .map_request(|mut request: http::Request| { @@ -62,11 +64,10 @@ where .insert("accept-encoding", "br".try_into().unwrap()); request }) - .map_response(|response: http::Response| { + .map_response(|response: http::Response| { let mut response = response.map(|body| { - // Convert from axum’s BoxBody to AsyncBufRead - let mut body = 
Box::pin(body); - let stream = poll_fn(move |ctx| body.as_mut().poll_data(ctx)) + let stream = body + .into_data_stream() .map(|result| result.map_err(|e| io::Error::new(io::ErrorKind::Other, e))); StreamReader::new(stream) }); @@ -243,7 +244,7 @@ pub(crate) fn json( > where InnerService: Service< - http::Request, + http::Request, Response = http::Response>>, Error = BoxError, >, @@ -254,7 +255,7 @@ where "content-type", HeaderValue::from_static(APPLICATION_JSON.essence_str()), ); - request.map(|body| serde_json::to_vec(&body).unwrap().into()) + request.map(|body| router::body::from_bytes(serde_json::to_vec(&body).unwrap())) }) .map_response(|response: http::Response>>| { let (parts, body) = response.into_parts(); diff --git a/apollo-router/src/test_harness/http_snapshot.rs b/apollo-router/src/test_harness/http_snapshot.rs new file mode 100644 index 0000000000..eff17e59f7 --- /dev/null +++ b/apollo-router/src/test_harness/http_snapshot.rs @@ -0,0 +1,574 @@ +//! Snapshot server to capture and replay HTTP responses. This is useful for: +//! +//! * Capturing HTTP responses from a real API or server, and replaying them in tests +//! * Mocking responses from a non-existent HTTP API for testing +//! * Working offline by capturing output from a server, and replaying it +//! +//! For example, this can be used with the router `override_subgraph_url` to replay recorded +//! responses from GraphQL subgraphs. Or it can be used with `override_url` in Connectors, to +//! record the HTTP responses from an external REST API. This allows the replayed responses to +//! be used in tests, or even in Apollo Sandbox to work offline or avoid hitting the REST API +//! too frequently. +//! +//! The snapshot server can be started from tests by calling the [`SnapshotServer::spawn`] method, +//! or as a standalone application by invoking [`standalone::main`]. In the latter case, there +//! is a binary wrapper in `http_snapshot_main` that can be run like this: +//! +//! 
`cargo run --bin snapshot --features snapshot -- --snapshot-path --url [--offline] [--update] [--port ] [-v]` +//! +//! Any requests made to the snapshot server will be proxied on to the given base URL, and the +//! responses will be saved to the given file. The next time the snapshot server receives the +//! same request (same relative path, HTTP method, and request body), it will respond with the +//! response recorded in the file rather than sending the request to the upstream server. +//! +//! The snapshot file can be manually edited to manipulate responses for testing purposes, or to +//! redact information that you don't want to include in source-controlled snapshot files. +//! +//! The offline mode will never call the upstream server, and will always return a saved snapshot +//! response. If one is not available, a `500` error is returned. This is useful for tests, for +//! example to ensure that CI builds never attempt to access the network. +//! +//! The update mode can be used to force an update of recorded snapshots, even if there is already +//! a snapshot saved in the file. This overrides the offline mode, and is useful to update tests +//! when a change is made to the upstream HTTP responses. +//! +//! The set of response headers returned can be filtered by supplying a list of headers to include. +//! This is typically desirable, as headers may contain ephemeral information like dates or tokens. +//! +//! **IMPORTANT:** this module stores HTTP responses to the local file system in plain text. It +//! should not be used with production APIs that return sensitive data. +//! +//! This module should also not be used in conjunction with performance testing, as returning +//! snapshot data locally will be much faster than sending HTTP requests to an external server. 
+ +use std::collections::BTreeMap; +use std::net::SocketAddr; +use std::net::TcpListener; +use std::path::Path; +use std::str::FromStr; +use std::sync::Arc; +use std::sync::Mutex; + +use axum::extract::Path as AxumPath; +use axum::extract::State; +use axum::routing::any; +use axum::Router; +use base64::Engine; +use http::HeaderMap; +use http::HeaderName; +use http::HeaderValue; +use http::Method; +use http::Uri; +use hyper::StatusCode; +use hyper_rustls::ConfigBuilderExt; +use indexmap::IndexMap; +use serde::Deserialize; +use serde::Serialize; +use serde_json_bytes::json; +use serde_json_bytes::Value; +use tower::ServiceExt; +use tracing::debug; +use tracing::error; +use tracing::info; +use tracing::warn; + +use crate::configuration::shared::Client; +use crate::services::http::HttpClientService; +use crate::services::http::HttpRequest; +use crate::services::router; +use crate::services::router::body::RouterBody; + +/// An error from the snapshot server +#[derive(Debug, thiserror::Error)] +enum SnapshotError { + /// Unable to load snapshots + #[error("unable to load snapshots")] + IoError(#[from] std::io::Error), + /// Unable to parse snapshots + #[error("unable to parse snapshots")] + ParseError(#[from] serde_json::Error), +} + +/// A server that mocks an API using snapshots recorded from actual HTTP responses. 
+#[cfg_attr(test, allow(unreachable_pub))] +pub struct SnapshotServer { + // The socket address the server is listening on + #[cfg_attr(not(test), allow(dead_code))] + socket_address: SocketAddr, +} + +#[derive(Clone)] +struct SnapshotServerState { + client: HttpClientService, + base_url: Uri, + snapshots: Arc>>, + snapshot_file: Box, + offline: bool, + update: bool, + include_headers: Option>, +} + +async fn root_handler( + State(state): State, + req: http::Request, +) -> Result, StatusCode> { + handle(State(state), req, "/".to_string()).await +} + +async fn handler( + State(state): State, + AxumPath(path): AxumPath, + req: http::Request, +) -> Result, StatusCode> { + handle(State(state), req, path).await +} + +async fn handle( + State(state): State, + req: http::Request, + path: String, +) -> Result, StatusCode> { + let uri = [state.base_url.to_string(), path.clone()].concat(); + let method = req.method().clone(); + let version = req.version(); + let request_headers = req.headers().clone(); + let body_bytes = axum::body::to_bytes(req.into_body(), usize::MAX) + .await + .unwrap(); + let request_json_body = serde_json::from_slice(&body_bytes).unwrap_or(Value::Null); + + let key = snapshot_key( + Some(method.as_str()), + Some(path.as_str()), + &request_json_body, + ); + + if let Some(response) = response_from_snapshot(&state, &uri, &method, &key) { + Ok(response) + } else if state.offline && !state.update { + fail( + uri, + method, + "Offline mode enabled and no snapshot available", + ) + } else { + debug!( + url = %uri, + method = %method, + "Taking snapshot" + ); + let mut request = http::Request::builder() + .method(method.clone()) + .version(version) + .uri(uri.clone()) + .body(router::body::from_bytes(body_bytes)) + .unwrap(); + *request.headers_mut() = request_headers.clone(); + let response = state + .client + .oneshot(HttpRequest { + http_request: request, + context: crate::context::Context::new(), + }) + .await + .unwrap(); + let (parts, body) = 
response.http_response.into_parts(); + + if let Ok(body_bytes) = router::body::into_bytes(body).await { + if let Ok(response_json_body) = serde_json::from_slice(&body_bytes) { + let snapshot = Snapshot { + request: Request { + method: Some(method.to_string()), + path: Some(path), + body: request_json_body, + }, + response: Response { + status: parts.status.as_u16(), + headers: map_headers(parts.headers, |name| { + state + .include_headers + .as_ref() + .map(|headers| headers.contains(&name.to_string())) + .unwrap_or(true) + }), + body: response_json_body, + }, + }; + { + let mut snapshots = state.snapshots.lock().unwrap(); + snapshots.insert(key, snapshot.clone()); + if let Err(e) = save(state.snapshot_file, &mut snapshots) { + error!( + url = %uri, + method = %method, + error = ?e, + "Unable to save snapshot" + ); + } + } + if let Ok(response) = snapshot.into_body() { + Ok(response) + } else { + fail(uri, method, "Unable to convert snapshot into response body") + } + } else { + fail(uri, method, "Unable to parse response body as JSON") + } + } else { + fail(uri, method, "Unable to read response body") + } + } +} + +fn response_from_snapshot( + state: &SnapshotServerState, + uri: &String, + method: &Method, + key: &String, +) -> Option> { + let mut snapshots = state.snapshots.lock().unwrap(); + if state.update { + snapshots.remove(key); + None + } else { + snapshots.get(key).and_then(|snapshot| { + debug!( + url = %uri, + method = %method, + "Found existing snapshot" + ); + snapshot + .clone() + .into_body() + .map_err(|e| error!("Unable to convert snapshot into HTTP response: {:?}", e)) + .ok() + }) + } +} + +fn fail( + uri: String, + method: Method, + message: &str, +) -> Result, StatusCode> { + error!( + url = %uri, + method = %method, + message + ); + http::Response::builder() + .status(StatusCode::INTERNAL_SERVER_ERROR) + .body(router::body::from_bytes( + json!({ "error": message}).to_string(), + )) + .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR) +} + +fn 
map_headers bool>( + headers: HeaderMap, + include: F, +) -> IndexMap> { + headers.iter().fold( + IndexMap::new(), + |mut map: IndexMap>, (name, value)| { + let name = name.to_string(); + if include(&name) { + let value = value.to_str().unwrap_or_default().to_string(); + map.entry(name).or_default().push(value); + } + map + }, + ) +} + +fn save>( + path: P, + snapshots: &mut BTreeMap, +) -> Result<(), SnapshotError> { + let path = path.as_ref(); + if let Some(parent) = path.parent() { + std::fs::create_dir_all(parent)?; + } + let snapshots = snapshots.values().cloned().collect::>(); + std::fs::write(path, serde_json::to_string_pretty(&snapshots)?).map_err(Into::into) +} + +fn load>(path: P) -> Result, SnapshotError> { + let str = std::fs::read_to_string(path)?; + let snapshots: Vec = serde_json::from_str(&str)?; + info!("Loaded {} snapshots", snapshots.len()); + Ok(snapshots + .into_iter() + .map(|snapshot| (snapshot.key(), snapshot)) + .collect()) +} + +impl SnapshotServer { + /// Spawn the server in a new task and return. Used for tests. + #[cfg_attr(test, allow(unreachable_pub))] + pub async fn spawn>( + snapshot_path: P, + base_url: Uri, + offline: bool, + update: bool, + include_headers: Option>, + port: Option, + ) -> Self { + let listener = port.map(|port| { + TcpListener::bind(format!("127.0.0.1:{port}")) + .expect("Failed to bind an OS port for snapshot server") + }); + Self::inner_start( + snapshot_path, + base_url, + true, + offline, + update, + include_headers, + listener, + ) + .await + } + + /// Start the server and block. Can be used to run the server as a standalone application. 
+ pub(crate) async fn start>( + snapshot_path: P, + base_url: Uri, + offline: bool, + update: bool, + include_headers: Option>, + listener: Option, + ) -> Self { + Self::inner_start( + snapshot_path, + base_url, + false, + offline, + update, + include_headers, + listener, + ) + .await + } + + /// Get the URI the server is listening at + #[cfg_attr(not(test), allow(dead_code))] + #[cfg_attr(test, allow(unreachable_pub))] + pub fn uri(&self) -> String { + format!("http://{}", self.socket_address) + } + + async fn inner_start>( + snapshot_path: P, + base_url: Uri, + spawn: bool, + offline: bool, + update: bool, + include_headers: Option>, + listener: Option, + ) -> Self { + if update { + info!("Running in update mode ⬆️"); + } else if offline { + info!("Running in offline mode ⛔️"); + } + + let snapshot_file = snapshot_path.as_ref(); + + let snapshots = load(snapshot_file).unwrap_or_else(|_| { + if offline { + warn!("Unable to load snapshot file in offline mode - all requests will fail"); + } + BTreeMap::default() + }); + + let _ = rustls::crypto::aws_lc_rs::default_provider().install_default(); + + let http_service = HttpClientService::new( + "test", + rustls::ClientConfig::builder() + .with_native_roots() + .expect("Able to load native roots") + .with_no_client_auth(), + Client::builder().build(), + ) + .expect("can create a HttpService"); + let app = Router::new() + .route("/", any(root_handler)) + .route("/{*path}", any(handler)) // won't match root, so we need the root handler above + .with_state(SnapshotServerState { + client: http_service, + base_url: base_url.clone(), + snapshots: Arc::new(Mutex::new(snapshots.clone())), + snapshot_file: Box::from(snapshot_file), + offline, + update, + include_headers, + }); + let listener = listener.unwrap_or( + TcpListener::bind("127.0.0.1:0") + .expect("Failed to bind an OS port for snapshot server"), + ); + let local_address = listener + .local_addr() + .expect("Failed to get snapshot server address."); + info!( + 
"Snapshot server listening on port {:?}", + local_address.port() + ); + if spawn { + tokio::spawn(async move { + axum_server::Server::from_tcp(listener) + .serve(app.into_make_service()) + .await + .unwrap(); + }); + } else { + axum_server::from_tcp(listener) + .serve(app.into_make_service()) + .await + .unwrap(); + } + Self { + socket_address: local_address, + } + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct Snapshot { + request: Request, + response: Response, +} + +impl Snapshot { + fn into_body(self) -> Result, ()> { + let mut response = http::Response::builder().status(self.response.status); + if let Some(headers) = response.headers_mut() { + for (name, values) in self.response.headers.into_iter() { + if let Ok(name) = HeaderName::from_str(&name.clone()) { + for value in values { + if let Ok(value) = HeaderValue::from_str(&value.clone()) { + headers.insert(name.clone(), value); + } + } + } else { + warn!("Invalid header name `{}` in snapshot", name); + } + } + } + let body_string = self.response.body.to_string(); + if let Ok(response) = response.body(router::body::from_bytes(body_string)) { + return Ok(response); + } + Err(()) + } + + fn key(&self) -> String { + snapshot_key( + self.request.method.as_deref(), + self.request.path.as_deref(), + &self.request.body, + ) + } +} + +fn snapshot_key(method: Option<&str>, path: Option<&str>, body: &Value) -> String { + if body.is_null() { + format!("{}-{}", method.unwrap_or("GET"), path.unwrap_or("/")) + } else { + let body = base64::engine::general_purpose::STANDARD.encode(body.to_string()); + format!( + "{}-{}-{}", + method.unwrap_or("GET"), + path.unwrap_or("/"), + body, + ) + } +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct Request { + method: Option, + path: Option, + body: Value, +} + +#[derive(Clone, Debug, Serialize, Deserialize)] +struct Response { + status: u16, + #[serde(default)] + headers: IndexMap>, + body: Value, +} + +/// Standalone snapshot server +pub(crate) mod 
standalone { + use std::net::TcpListener; + use std::path::PathBuf; + + use clap::Parser; + use http::Uri; + use tracing_core::Level; + + use super::SnapshotServer; + + #[derive(Parser, Debug)] + #[clap(name = "snapshot", about = "Apollo snapshot server")] + #[command(disable_version_flag(true))] + struct Args { + /// Snapshot location relative to the project directory. + #[arg(short, long, value_parser)] + snapshot_path: PathBuf, + + /// Base URL for the server. + #[arg(short = 'l', long, value_parser)] + url: Uri, + + /// Run in offline mode, without making any HTTP requests to the base URL. + #[arg(short, long)] + offline: bool, + + /// Force snapshot updates (overrides `offline`). + #[arg(short, long)] + update: bool, + + /// Optional port to listen on (defaults to an ephemeral port). + #[arg(short, long)] + port: Option, + + /// Turn on verbose output + #[arg(short = 'v', long)] + verbose: bool, + } + + /// Run the snapshot server as a standalone application + pub async fn main() { + let args = Args::parse(); + + let subscriber = tracing_subscriber::FmtSubscriber::builder() + .with_max_level(if args.verbose { + Level::DEBUG + } else { + Level::INFO + }) + .finish(); + tracing::subscriber::set_global_default(subscriber) + .expect("setting default subscriber failed"); + + let listener = args.port.map(|port| { + TcpListener::bind(format!("127.0.0.1:{port}")) + .expect("Failed to bind an OS port for snapshot server") + }); + + SnapshotServer::start( + args.snapshot_path, + args.url, + args.offline, + args.update, + None, + listener, + ) + .await; + } +} diff --git a/apollo-router/src/test_harness/http_snapshot_main.rs b/apollo-router/src/test_harness/http_snapshot_main.rs new file mode 100644 index 0000000000..27465f184c --- /dev/null +++ b/apollo-router/src/test_harness/http_snapshot_main.rs @@ -0,0 +1,6 @@ +use apollo_router::snapshot_server; + +#[tokio::main] +async fn main() { + snapshot_server().await +} diff --git 
a/apollo-router/src/testdata/jaeger.router.yaml b/apollo-router/src/testdata/jaeger.router.yaml index 0f7c367fe9..66872a0714 100644 --- a/apollo-router/src/testdata/jaeger.router.yaml +++ b/apollo-router/src/testdata/jaeger.router.yaml @@ -19,16 +19,6 @@ telemetry: scheduled_delay: 100ms agent: endpoint: default - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/src/tracer.rs b/apollo-router/src/tracer.rs index e26628f979..d94b912bb0 100644 --- a/apollo-router/src/tracer.rs +++ b/apollo-router/src/tracer.rs @@ -126,7 +126,7 @@ mod test { .unwrap_or_else(|poisoned| poisoned.into_inner()); // Create a tracing layer with the configured tracer - let provider = opentelemetry::sdk::trace::TracerProvider::builder() + let provider = opentelemetry_sdk::trace::TracerProvider::builder() .with_simple_exporter( opentelemetry_stdout::SpanExporter::builder() .with_writer(std::io::stdout()) @@ -155,7 +155,7 @@ mod test { let my_id = TraceId::maybe_new(); assert!(my_id.is_none()); // Create a tracing layer with the configured tracer - let provider = opentelemetry::sdk::trace::TracerProvider::builder() + let provider = opentelemetry_sdk::trace::TracerProvider::builder() .with_simple_exporter(opentelemetry_stdout::SpanExporter::default()) .build(); let tracer = provider.versioned_tracer("noop", None::, None::, None); @@ -180,7 +180,7 @@ mod test { .lock() .unwrap_or_else(|poisoned| poisoned.into_inner()); // Create a tracing layer with the configured tracer - let provider = opentelemetry::sdk::trace::TracerProvider::builder() + let provider = opentelemetry_sdk::trace::TracerProvider::builder() .with_simple_exporter(opentelemetry_stdout::SpanExporter::default()) .build(); 
let tracer = provider.versioned_tracer("noop", None::, None::, None); diff --git a/apollo-router/src/uplink/license_enforcement.rs b/apollo-router/src/uplink/license_enforcement.rs index 1d23f9cc6c..132f2f6c72 100644 --- a/apollo-router/src/uplink/license_enforcement.rs +++ b/apollo-router/src/uplink/license_enforcement.rs @@ -11,8 +11,11 @@ use std::time::Duration; use std::time::SystemTime; use std::time::UNIX_EPOCH; +use apollo_compiler::ast; use apollo_compiler::schema::Directive; use apollo_compiler::schema::ExtendedType; +use apollo_compiler::Name; +use apollo_compiler::Node; use buildstructor::Builder; use displaydoc::Display; use itertools::Itertools; @@ -107,6 +110,7 @@ impl ParsedLinkSpec { .specified_argument_by_name(LINK_URL_ARGUMENT) .and_then(|value| { let url_string = value.as_str(); + let parsed_url = Url::parse(url_string.unwrap_or_default()).ok()?; let mut segments = parsed_url.path_segments()?; @@ -135,6 +139,43 @@ impl ParsedLinkSpec { }) } + fn from_join_directive_args( + args: &[(Name, Node)], + ) -> Option> { + let url_string = args + .iter() + .find(|(name, _)| name == &Name::new_unchecked(LINK_URL_ARGUMENT)) + .and_then(|(_, value)| value.as_str()); + + let parsed_url = Url::parse(url_string.unwrap_or_default()).ok()?; + + let mut segments = parsed_url.path_segments()?; + let spec_name = segments.next()?.to_string(); + let spec_url = format!( + "{}://{}/{}", + parsed_url.scheme(), + parsed_url.host()?, + spec_name + ); + let version_string = segments.next()?.strip_prefix('v')?; + let parsed_version = + semver::Version::parse(format!("{}.0", &version_string).as_str()).ok()?; + + let imported_as = args + .iter() + .find(|(name, _)| name == &Name::new_unchecked(LINK_AS_ARGUMENT)) + .and_then(|(_, value)| value.as_str()) + .map(|s| s.to_string()); + + Some(Ok(ParsedLinkSpec { + spec_name, + spec_url, + version: parsed_version, + imported_as, + url: url_string?.to_string(), + })) + } + // Implements directive name construction logic for link 
directives. // 1. If the link directive has an `as` argument, use that as the prefix. // 2. If the link directive's spec name is the same as the default name, use the default name with no prefix. @@ -212,6 +253,28 @@ impl LicenseEnforcementReport { }) .collect::>(); + let link_specs_in_join_directive = schema + .supergraph_schema() + .schema_definition + .directives + .get_all("join__directive") + .filter(|join| { + join.specified_argument_by_name("name") + .and_then(|name| name.as_str()) + .map(|name| name == LINK_DIRECTIVE_NAME) + .unwrap_or_default() + }) + .filter_map(|join| { + join.specified_argument_by_name("args") + .and_then(|arg| arg.as_object()) + }) + .filter_map(|link| { + ParsedLinkSpec::from_join_directive_args(link).map(|maybe_spec| { + maybe_spec.ok().map(|spec| (spec.spec_url.to_owned(), spec)) + })? + }) + .collect::>(); + let mut schema_violations: Vec = Vec::new(); for (_subgraph_name, subgraph_url) in schema.subgraphs() { @@ -288,6 +351,20 @@ impl LicenseEnforcementReport { } } } + SchemaRestriction::SpecInJoinDirective { + spec_url, + name, + version_req, + } => { + if let Some(link_spec) = link_specs_in_join_directive.get(spec_url) { + if version_req.matches(&link_spec.version) { + schema_violations.push(SchemaViolation::Spec { + url: link_spec.url.to_string(), + name: name.to_string(), + }); + } + } + } } } @@ -412,6 +489,19 @@ impl LicenseEnforcementReport { }], }, }, + SchemaRestriction::SpecInJoinDirective { + name: "connect".to_string(), + spec_url: "https://specs.apollo.dev/connect".to_string(), + version_req: semver::VersionReq { + comparators: vec![semver::Comparator { + op: semver::Op::Exact, + major: 0, + minor: 1.into(), + patch: 0.into(), + pre: semver::Prerelease::EMPTY, + }], + }, + }, SchemaRestriction::Spec { name: "context".to_string(), spec_url: "https://specs.apollo.dev/context".to_string(), @@ -615,6 +705,12 @@ pub(crate) enum SchemaRestriction { argument: String, explanation: String, }, + + SpecInJoinDirective { + 
spec_url: String, + name: String, + version_req: semver::VersionReq, + }, } #[derive(Debug, Clone, Serialize, Deserialize)] @@ -675,6 +771,7 @@ mod test { use crate::uplink::license_enforcement::License; use crate::uplink::license_enforcement::LicenseEnforcementReport; use crate::uplink::license_enforcement::OneOrMany; + use crate::uplink::license_enforcement::SchemaViolation; use crate::Configuration; #[track_caller] @@ -901,4 +998,24 @@ mod test { "shouldn't have found restricted features" ); } + + #[test] + fn schema_enforcement_connectors() { + let report = check( + include_str!("testdata/oss.router.yaml"), + include_str!("testdata/schema_enforcement_connectors.graphql"), + ); + + assert_eq!( + 1, + report.restricted_schema_in_use.len(), + "should have found restricted connect feature" + ); + if let SchemaViolation::Spec { url, name } = &report.restricted_schema_in_use[0] { + assert_eq!("https://specs.apollo.dev/connect/v0.1", url); + assert_eq!("connect", name); + } else { + panic!("should have reported connect feature violation") + } + } } diff --git a/apollo-router/src/uplink/mod.rs b/apollo-router/src/uplink/mod.rs index 2c13b02cb7..ef4d1a400e 100644 --- a/apollo-router/src/uplink/mod.rs +++ b/apollo-router/src/uplink/mod.rs @@ -491,7 +491,7 @@ mod test { use buildstructor::buildstructor; use futures::StreamExt; use graphql_client::GraphQLQuery; - use http::StatusCode; + use http_0_2::StatusCode; use insta::assert_yaml_snapshot; use serde_json::json; use test_query::FetchErrorCode; diff --git a/apollo-router/src/uplink/testdata/schema_enforcement_connectors.graphql b/apollo-router/src/uplink/testdata/schema_enforcement_connectors.graphql new file mode 100644 index 0000000000..d906273a06 --- /dev/null +++ b/apollo-router/src/uplink/testdata/schema_enforcement_connectors.graphql @@ -0,0 +1,121 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.4", for: EXECUTION) + @join__directive( + graphs: 
[CONNECTORS] + name: "link" + args: { + url: "https://specs.apollo.dev/connect/v0.1" + import: ["@connect", "@source"] + } + ) + @join__directive( + graphs: [CONNECTORS] + name: "source" + args: { + name: "json" + http: { baseURL: "https://jsonplaceholder.typicode.com/" } + } + ) { + query: Query +} + +directive @join__directive( + graphs: [join__Graph!] + name: String! + args: join__DirectiveArguments +) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field( + graph: join__Graph + requires: join__FieldSet + provides: join__FieldSet + type: String + external: Boolean + override: String + usedOverridden: Boolean + overrideLabel: String +) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements( + graph: join__Graph! + interface: String! +) repeatable on OBJECT | INTERFACE + +directive @join__type( + graph: join__Graph! + key: join__FieldSet + extension: Boolean! = false + resolvable: Boolean! = true + isInterfaceObject: Boolean! = false +) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember( + graph: join__Graph! + member: String! 
+) repeatable on UNION + +directive @link( + url: String + as: String + for: link__Purpose + import: [link__Import] +) repeatable on SCHEMA + +type Address @join__type(graph: CONNECTORS) { + street: String + suite: String + city: String + zipcode: String + geo: AddressGeo +} + +type AddressGeo @join__type(graph: CONNECTORS) { + lat: String + lng: String +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") + GRAPHQL @join__graph(name: "graphql", url: "https://localhost:4001") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Query @join__type(graph: CONNECTORS) @join__type(graph: GRAPHQL) { + users: [User] + @join__field(graph: CONNECTORS) + @join__directive( + graphs: [CONNECTORS] + name: "connect" + args: { source: "json", http: { GET: "/users" }, selection: "id name" } + ) +} + +type User + @join__type(graph: CONNECTORS, key: "id") + @join__type(graph: GRAPHQL, key: "id") { + id: ID! + name: String @join__field(graph: CONNECTORS) + c: String @join__field(graph: GRAPHQL) +} diff --git a/apollo-router/templates/sandbox_index.html b/apollo-router/templates/sandbox_index.html index ec9db97705..2642787544 100644 --- a/apollo-router/templates/sandbox_index.html +++ b/apollo-router/templates/sandbox_index.html @@ -64,6 +64,7 @@

Welcome to the Apollo Router

}, hideCookieToggle: false, endpointIsEditable: false, + initialRequestConnectorsDebugging: true, runtime: "apollo-router@{{APOLLO_ROUTER_VERSION}}" }); diff --git a/apollo-router/tests/apollo_otel_traces.rs b/apollo-router/tests/apollo_otel_traces.rs index 89fe5428a2..c6e3c5e18f 100644 --- a/apollo-router/tests/apollo_otel_traces.rs +++ b/apollo-router/tests/apollo_otel_traces.rs @@ -26,6 +26,7 @@ use axum::Extension; use axum::Json; use bytes::Bytes; use http::header::ACCEPT; +use http_body_util::BodyExt as _; use once_cell::sync::Lazy; use opentelemetry_proto::tonic::collector::trace::v1::ExportTraceServiceRequest; use prost::Message; @@ -50,7 +51,7 @@ async fn config( std::env::set_var("APOLLO_KEY", "test"); std::env::set_var("APOLLO_GRAPH_REF", "test"); - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); let addr = listener.local_addr().unwrap(); let app = axum::Router::new() .route("/", post(traces_handler)) @@ -58,9 +59,7 @@ async fn config( .layer(tower_http::add_extension::AddExtensionLayer::new(reports)); let task = ROUTER_SERVICE_RUNTIME.spawn(async move { - axum::Server::from_tcp(listener) - .expect("must be able to create otlp receiver") - .serve(app.into_make_service()) + axum::serve(listener, app) .await .expect("could not start axum server") }); @@ -84,7 +83,7 @@ async fn config( .expect("Could not sub in endpoint"); config = jsonpath_lib::replace_with( config, - "$.telemetry.apollo.experimental_otlp_tracing_sampler", + "$.telemetry.apollo.otlp_tracing_sampler", &mut |_| Some(serde_json::Value::String("always_on".to_string())), ) .expect("Could not sub in otlp sampler"); @@ -318,9 +317,12 @@ where .expect("router service call failed"); // Drain the response - let mut found_report = match hyper::body::to_bytes(response.response.into_body()) + let mut found_report = match response + .response + .into_body() + .collect() .await - .map(|b| 
String::from_utf8(b.to_vec())) + .map(|b| String::from_utf8(b.to_bytes().to_vec())) { Ok(Ok(response)) => { if response.contains("errors") { diff --git a/apollo-router/tests/apollo_reports.rs b/apollo-router/tests/apollo_reports.rs index 006c3d8e97..80eff3cb12 100644 --- a/apollo-router/tests/apollo_reports.rs +++ b/apollo-router/tests/apollo_reports.rs @@ -34,6 +34,7 @@ use axum::Extension; use axum::Json; use flate2::read::GzDecoder; use http::header::ACCEPT; +use http_body_util::BodyExt as _; use once_cell::sync::Lazy; use prost::Message; use proto::reports::Report; @@ -61,7 +62,7 @@ async fn config( std::env::set_var("APOLLO_KEY", "test"); std::env::set_var("APOLLO_GRAPH_REF", "test"); - let listener = std::net::TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); let addr = listener.local_addr().unwrap(); let app = axum::Router::new() .route("/", post(report)) @@ -69,9 +70,7 @@ async fn config( .layer(tower_http::add_extension::AddExtensionLayer::new(reports)); let task = ROUTER_SERVICE_RUNTIME.spawn(async move { - axum::Server::from_tcp(listener) - .expect("mut be able to create report receiver") - .serve(app.into_make_service()) + axum::serve(listener, app.into_make_service()) .await .expect("could not start axum server") }); @@ -378,9 +377,12 @@ where .expect("router service call failed"); // Drain the response - let mut found_report = match hyper::body::to_bytes(response.response.into_body()) + let mut found_report = match response + .response + .into_body() + .collect() .await - .map(|b| String::from_utf8(b.to_vec())) + .map(|b| String::from_utf8(b.to_bytes().to_vec())) { Ok(Ok(response)) => { if response.contains("errors") { @@ -428,7 +430,7 @@ async fn get_batch_stats_report bool + Send + Sync + Copy + ' .expect("router service call failed"); // Drain the response (and throw it away) - let _found_report = hyper::body::to_bytes(response.response.into_body()).await; + let _found_report = 
response.response.into_body().collect().await; // Give the server a little time to export something // If this test fails, consider increasing this time. diff --git a/apollo-router/tests/common.rs b/apollo-router/tests/common.rs index e70cf5f0de..18b299bdcf 100644 --- a/apollo-router/tests/common.rs +++ b/apollo-router/tests/common.rs @@ -26,21 +26,21 @@ use mediatype::WriteParams; use mime::APPLICATION_JSON; use opentelemetry::global; use opentelemetry::propagation::TextMapPropagator; -use opentelemetry::sdk::trace::config; -use opentelemetry::sdk::trace::BatchSpanProcessor; -use opentelemetry::sdk::trace::TracerProvider; -use opentelemetry::sdk::Resource; use opentelemetry::testing::trace::NoopSpanExporter; use opentelemetry::trace::TraceContextExt; +use opentelemetry::KeyValue; use opentelemetry_api::trace::SpanContext; use opentelemetry_api::trace::TraceId; use opentelemetry_api::trace::TracerProvider as OtherTracerProvider; use opentelemetry_api::Context; -use opentelemetry_api::KeyValue; use opentelemetry_otlp::HttpExporterBuilder; use opentelemetry_otlp::Protocol; use opentelemetry_otlp::SpanExporterBuilder; use opentelemetry_otlp::WithExportConfig; +use opentelemetry_sdk::trace::config; +use opentelemetry_sdk::trace::BatchSpanProcessor; +use opentelemetry_sdk::trace::TracerProvider; +use opentelemetry_sdk::Resource; use opentelemetry_semantic_conventions::resource::SERVICE_NAME; use regex::Regex; use reqwest::Request; @@ -227,7 +227,7 @@ impl Telemetry { .with_service_name(service_name) .build_sync_agent_exporter() .expect("jaeger pipeline failed"), - opentelemetry::runtime::Tokio, + opentelemetry_sdk::runtime::Tokio, ) .with_scheduled_delay(Duration::from_millis(10)) .build(), @@ -246,7 +246,7 @@ impl Telemetry { ) .build_span_exporter() .expect("otlp pipeline failed"), - opentelemetry::runtime::Tokio, + opentelemetry_sdk::runtime::Tokio, ) .with_scheduled_delay(Duration::from_millis(10)) .build(), @@ -260,7 +260,7 @@ impl Telemetry { 
.with_service_name(service_name) .build_exporter() .expect("datadog pipeline failed"), - opentelemetry::runtime::Tokio, + opentelemetry_sdk::runtime::Tokio, ) .with_scheduled_delay(Duration::from_millis(10)) .build(), @@ -274,7 +274,7 @@ impl Telemetry { .with_service_name(service_name) .init_exporter() .expect("zipkin pipeline failed"), - opentelemetry::runtime::Tokio, + opentelemetry_sdk::runtime::Tokio, ) .with_scheduled_delay(Duration::from_millis(10)) .build(), @@ -295,7 +295,7 @@ impl Telemetry { let propagator = opentelemetry_jaeger::Propagator::new(); propagator.inject_context( &ctx, - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ) } Telemetry::Datadog => { @@ -308,7 +308,7 @@ impl Telemetry { let propagator = opentelemetry_datadog::DatadogPropagator::new(); propagator.inject_context( &ctx, - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ); if let Some(psr) = psr { @@ -318,17 +318,17 @@ impl Telemetry { } } Telemetry::Otlp { .. 
} => { - let propagator = opentelemetry::sdk::propagation::TraceContextPropagator::default(); + let propagator = opentelemetry_sdk::propagation::TraceContextPropagator::default(); propagator.inject_context( &ctx, - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ) } Telemetry::Zipkin => { let propagator = opentelemetry_zipkin::Propagator::new(); propagator.inject_context( &ctx, - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ) } _ => {} @@ -420,7 +420,9 @@ impl IntegrationTest { let url = format!("http://{address}/"); // Add a default override for products, if not specified - subgraph_overrides.entry("products".into()).or_insert(url); + subgraph_overrides + .entry("products".into()) + .or_insert(url.clone()); // Insert the overrides into the config let config_str = merge_overrides(&config, &subgraph_overrides, None, &redis_namespace); @@ -438,13 +440,23 @@ impl IntegrationTest { let subgraph_context = Arc::new(Mutex::new(None)); Mock::given(method("POST")) - .respond_with(TracedResponder{response_template:responder.unwrap_or_else(|| - ResponseTemplate::new(200).set_body_json(json!({"data":{"topProducts":[{"name":"Table"},{"name":"Couch"},{"name":"Chair"}]}}))), + .respond_with(TracedResponder { + response_template: responder.unwrap_or_else(|| { + ResponseTemplate::new(200).set_body_json(json!({ + "data": { + "topProducts": [ + { "name": "Table" }, + { "name": "Couch" }, + { "name": "Chair" }, + ], + }, + })) + }), telemetry: telemetry.clone(), extra_propagator: extra_propagator.clone(), subscriber_subgraph: Self::dispatch(&tracer_provider_subgraph), subgraph_callback, - subgraph_context: subgraph_context.clone() + subgraph_context: subgraph_context.clone(), }) .mount(&subgraphs) .await; @@ -525,7 +537,6 @@ impl IntegrationTest { .env("APOLLO_KEY", apollo_key) .env("APOLLO_GRAPH_REF", 
apollo_graph_ref); } - router .args(dbg!([ "--hr", @@ -551,7 +562,6 @@ impl IntegrationTest { let mut lines = reader.lines(); while let Ok(Some(line)) = lines.next_line().await { println!("{line}"); - // Extract the bind address from a log line that looks like this: GraphQL endpoint exposed at http://127.0.0.1:51087/ if let Some(captures) = bind_address_regex.captures(&line) { let address = captures.name("address").unwrap().as_str(); @@ -748,7 +758,7 @@ impl IntegrationTest { global::get_text_map_propagator(|propagator| { propagator.inject_context( &tracing::span::Span::current().context(), - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ); }); request.headers_mut().remove(ACCEPT); @@ -789,7 +799,7 @@ impl IntegrationTest { global::get_text_map_propagator(|propagator| { propagator.inject_context( &span.context(), - &mut opentelemetry_http::HeaderInjector(request.headers_mut()), + &mut apollo_router::otel_compat::HeaderInjector(request.headers_mut()), ); }); @@ -1139,6 +1149,7 @@ fn merge_overrides( let overrides = subgraph_overrides .iter() .map(|(name, url)| (name.clone(), serde_json::Value::String(url.clone()))); + let overrides2 = overrides.clone(); match config .as_object_mut() .and_then(|o| o.get_mut("override_subgraph_url")) @@ -1153,6 +1164,23 @@ fn merge_overrides( override_url.extend(overrides); } } + if let Some(sources) = config + .as_object_mut() + .and_then(|o| o.get_mut("preview_connectors")) + .and_then(|o| o.as_object_mut()) + .and_then(|o| o.get_mut("subgraphs")) + .and_then(|o| o.as_object_mut()) + .and_then(|o| o.get_mut("connectors")) + .and_then(|o| o.as_object_mut()) + .and_then(|o| o.get_mut("sources")) + .and_then(|o| o.as_object_mut()) + { + for (name, url) in overrides2 { + let mut obj = serde_json::Map::new(); + obj.insert("override_url".to_string(), url.clone()); + sources.insert(name.to_string(), Value::Object(obj)); + } + } // Override the 
listening address always since we spawn the router on a // random port. diff --git a/apollo-router/tests/fixtures/apollo_reports.router.yaml b/apollo-router/tests/fixtures/apollo_reports.router.yaml index 644e286ee7..776dcb36f3 100644 --- a/apollo-router/tests/fixtures/apollo_reports.router.yaml +++ b/apollo-router/tests/fixtures/apollo_reports.router.yaml @@ -30,7 +30,7 @@ telemetry: scheduled_delay: 10ms experimental_local_field_metrics: false experimental_otlp_endpoint: "http://127.0.0.1" - experimental_otlp_tracing_sampler: always_off + otlp_tracing_sampler: always_off experimental_otlp_tracing_protocol: http field_level_instrumentation_sampler: always_on send_headers: diff --git a/apollo-router/tests/fixtures/apollo_reports_batch.router.yaml b/apollo-router/tests/fixtures/apollo_reports_batch.router.yaml index e60791ebbc..b1bd1c3b1c 100644 --- a/apollo-router/tests/fixtures/apollo_reports_batch.router.yaml +++ b/apollo-router/tests/fixtures/apollo_reports_batch.router.yaml @@ -30,7 +30,7 @@ telemetry: scheduled_delay: 10ms experimental_local_field_metrics: false experimental_otlp_endpoint: "http://127.0.0.1" - experimental_otlp_tracing_sampler: always_off + otlp_tracing_sampler: always_off experimental_otlp_tracing_protocol: http field_level_instrumentation_sampler: always_on send_headers: diff --git a/apollo-router/tests/fixtures/supergraph_connect.graphql b/apollo-router/tests/fixtures/supergraph_connect.graphql new file mode 100644 index 0000000000..a1ed2a27a1 --- /dev/null +++ b/apollo-router/tests/fixtures/supergraph_connect.graphql @@ -0,0 +1,73 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [POSTS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [POSTS], name: "source", args: {name: 
"jsonPlaceholder", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) + @join__directive(graphs: [POSTS], name: "source", args: {name: "routerHealth", http: {baseURL: "http://localhost:4000/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + POSTS @join__graph(name: "posts", url: "http://localhost") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Post + @join__type(graph: POSTS) +{ + id: ID! 
+ body: String + title: String + status: String +} + +type Query + @join__type(graph: POSTS) +{ + posts: [Post] @join__directive(graphs: [POSTS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts"}, selection: "id\ntitle\nbody"}) + post(id: ID!): Post @join__directive(graphs: [POSTS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts/{$args.id}"}, selection: "id\ntitle\nbody"}) @join__directive(graphs: [POSTS], name: "connect", args: {source: "routerHealth", http: {GET: "/health?_={$args.id}"}, selection: "id: $args.id\nstatus", entity: true}) +} diff --git a/apollo-router/tests/integration/file_upload.rs b/apollo-router/tests/integration/file_upload.rs index d179f39b66..be424db860 100644 --- a/apollo-router/tests/integration/file_upload.rs +++ b/apollo-router/tests/integration/file_upload.rs @@ -50,14 +50,15 @@ async fn it_uploads_file_to_subgraph() -> Result<(), BoxError> { .part("0", Part::text(FILE).file_name(FILE_NAME)); async fn subgraph_handler( - mut request: http::Request, + request: http::Request, ) -> impl axum::response::IntoResponse { let boundary = request .headers() .get(CONTENT_TYPE) .and_then(|v| multer::parse_boundary(v.to_str().ok()?).ok()) .expect("subgraph request should have valid Content-Type header"); - let mut multipart = multer::Multipart::new(request.body_mut(), boundary); + let mut multipart = + multer::Multipart::new(request.into_body().into_data_stream(), boundary); let operations_field = multipart .next_field() @@ -683,21 +684,21 @@ async fn it_fails_upload_without_file() -> Result<(), BoxError> { .subgraph_mapping("uploads", "/") .build() .run_test(|response| { - insta::assert_json_snapshot!(response, @r###" + insta::assert_json_snapshot!(response, @r#" { "errors": [ { - "message": "HTTP fetch failed from 'uploads': HTTP fetch failed from 'uploads': error from user's HttpBody stream: error reading a body from connection: Missing files in the request: '0'.", + "message": "HTTP fetch failed 
from 'uploads': HTTP fetch failed from 'uploads': error from user's Body stream: Missing files in the request: '0'.", "path": [], "extensions": { "code": "SUBREQUEST_HTTP_ERROR", "service": "uploads", - "reason": "HTTP fetch failed from 'uploads': error from user's HttpBody stream: error reading a body from connection: Missing files in the request: '0'." + "reason": "HTTP fetch failed from 'uploads': error from user's Body stream: Missing files in the request: '0'." } } ] } - "###); + "#); }) .await } @@ -761,21 +762,21 @@ async fn it_fails_with_file_size_limit() -> Result<(), BoxError> { .subgraph_mapping("uploads", "/") .build() .run_test(|response| { - insta::assert_json_snapshot!(response, @r###" + insta::assert_json_snapshot!(response, @r#" { "errors": [ { - "message": "HTTP fetch failed from 'uploads': HTTP fetch failed from 'uploads': error from user's HttpBody stream: error reading a body from connection: Exceeded the limit of 512.0 KB on 'fat.payload.bin' file.", + "message": "HTTP fetch failed from 'uploads': HTTP fetch failed from 'uploads': error from user's Body stream: Exceeded the limit of 512.0 KB on 'fat.payload.bin' file.", "path": [], "extensions": { "code": "SUBREQUEST_HTTP_ERROR", "service": "uploads", - "reason": "HTTP fetch failed from 'uploads': error from user's HttpBody stream: error reading a body from connection: Exceeded the limit of 512.0 KB on 'fat.payload.bin' file." + "reason": "HTTP fetch failed from 'uploads': error from user's Body stream: Exceeded the limit of 512.0 KB on 'fat.payload.bin' file." 
} } ] } - "###); + "#); }) .await } @@ -875,7 +876,7 @@ async fn it_fails_invalid_file_order() -> Result<(), BoxError> { .subgraph_mapping("uploads_clone", "/s2") .build() .run_test(|response| { - insta::assert_json_snapshot!(response, @r###" + insta::assert_json_snapshot!(response, @r#" { "data": { "file0": { @@ -886,17 +887,17 @@ async fn it_fails_invalid_file_order() -> Result<(), BoxError> { }, "errors": [ { - "message": "HTTP fetch failed from 'uploads_clone': HTTP fetch failed from 'uploads_clone': error from user's HttpBody stream: error reading a body from connection: Missing files in the request: '1'.", + "message": "HTTP fetch failed from 'uploads_clone': HTTP fetch failed from 'uploads_clone': error from user's Body stream: Missing files in the request: '1'.", "path": [], "extensions": { "code": "SUBREQUEST_HTTP_ERROR", "service": "uploads_clone", - "reason": "HTTP fetch failed from 'uploads_clone': error from user's HttpBody stream: error reading a body from connection: Missing files in the request: '1'." + "reason": "HTTP fetch failed from 'uploads_clone': error from user's Body stream: Missing files in the request: '1'." 
} } ] } - "###); + "#); }) .await } @@ -1018,6 +1019,7 @@ mod helper { use std::net::SocketAddr; use std::path::PathBuf; + use axum::body::Body; use axum::extract::State; use axum::response::IntoResponse; use axum::BoxError; @@ -1028,7 +1030,6 @@ mod helper { use http::header::CONTENT_TYPE; use http::Request; use http::StatusCode; - use hyper::Body; use itertools::Itertools; use multer::Multipart; use reqwest::multipart::Form; @@ -1131,15 +1132,13 @@ mod helper { let (shutdown_tx, shutdown_rx) = tokio::sync::oneshot::channel::<()>(); // Start the server using the tcp listener randomly assigned above - let server = axum::Server::from_tcp(bound.into_std().unwrap()) - .unwrap() - .serve(self.handler.into_make_service()) + let server = axum::serve(bound, self.handler.into_make_service()) .with_graceful_shutdown(async { shutdown_rx.await.ok(); }); // Spawn the server in the background, controlled by the shutdown signal - tokio::spawn(server); + tokio::spawn(async { server.await.unwrap() }); // Make the request and pass it into the validator callback let (_span, response) = router @@ -1265,7 +1264,8 @@ mod helper { .part("map", mappings); for (index, (file_name, file)) in names.into_iter().zip(files).enumerate() { let file_name: String = file_name.into(); - let part = Part::stream(hyper::Body::wrap_stream(file)).file_name(file_name); + + let part = Part::stream(reqwest::Body::wrap_stream(file)).file_name(file_name); request = request.part(index.to_string(), part); } @@ -1276,10 +1276,8 @@ mod helper { /// Handler that echos back the contents of the file that it receives /// /// Note: This will error if more than one file is received - pub async fn echo_single_file( - mut request: Request, - ) -> Result, FileUploadError> { - let (_, map, mut multipart) = decode_request(&mut request).await?; + pub async fn echo_single_file(request: Request) -> Result, FileUploadError> { + let (_, map, mut multipart) = decode_request(request).await?; // Assert that we only have 1 file if 
map.len() > 1 { @@ -1315,8 +1313,8 @@ mod helper { } /// Handler that echos back the contents of the files that it receives - pub async fn echo_files(mut request: Request) -> Result, FileUploadError> { - let (operation, map, mut multipart) = decode_request(&mut request).await?; + pub async fn echo_files(request: Request) -> Result, FileUploadError> { + let (operation, map, mut multipart) = decode_request(request).await?; // Make sure that we have some mappings if map.is_empty() { @@ -1369,10 +1367,8 @@ mod helper { } /// Handler that echos back the contents of the list of files that it receives - pub async fn echo_file_list( - mut request: Request, - ) -> Result, FileUploadError> { - let (operation, map, mut multipart) = decode_request(&mut request).await?; + pub async fn echo_file_list(request: Request) -> Result, FileUploadError> { + let (operation, map, mut multipart) = decode_request(request).await?; // Make sure that we have some mappings if map.is_empty() { @@ -1432,9 +1428,10 @@ mod helper { } /// A handler that always fails. Useful for tests that should not reach the subgraph at all. - pub async fn always_fail(mut request: Request) -> Result, FileUploadError> { + pub async fn always_fail(request: Request) -> Result, FileUploadError> { // Consume the stream - while request.body_mut().next().await.is_some() {} + let mut body = request.into_body().into_data_stream(); + while body.next().await.is_some() {} // Signal a failure Err(FileUploadError::ShouldHaveFailed) @@ -1445,9 +1442,9 @@ mod helper { /// Note: Make sure to use a router with state (Expected stream length, expected value). 
pub async fn verify_stream( State((expected_length, byte_value)): State<(usize, u8)>, - mut request: Request, + request: Request, ) -> Result, FileUploadError> { - let (_, _, mut multipart) = decode_request(&mut request).await?; + let (_, _, mut multipart) = decode_request(request).await?; let mut file = multipart .next_field() @@ -1524,8 +1521,9 @@ mod helper { /// Note: The order of the mapping must correspond with the order in the request, so /// we use a [BTreeMap] here to keep the order when traversing the list of files. async fn decode_request( - request: &mut Request, - ) -> Result<(Operation, BTreeMap>, Multipart), FileUploadError> { + request: Request, + ) -> Result<(Operation, BTreeMap>, Multipart<'static>), FileUploadError> + { let content_type = request .headers() .get(CONTENT_TYPE) @@ -1535,7 +1533,7 @@ mod helper { FileUploadError::BadHeaders(format!("could not parse multipart boundary: {e}")) })?)?; - let mut multipart = Multipart::new(request.body_mut(), boundary); + let mut multipart = Multipart::new(request.into_body().into_data_stream(), boundary); // Extract the operations // TODO: Should we be streaming here? 
diff --git a/apollo-router/tests/integration/fixtures/broken_plugin.router.yaml b/apollo-router/tests/integration/fixtures/broken_plugin.router.yaml index f8f8d372a8..9d7c12957d 100644 --- a/apollo-router/tests/integration/fixtures/broken_plugin.router.yaml +++ b/apollo-router/tests/integration/fixtures/broken_plugin.router.yaml @@ -19,16 +19,6 @@ telemetry: scheduled_delay: 100ms agent: endpoint: default - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/fixtures/happy.router.yaml b/apollo-router/tests/integration/fixtures/happy.router.yaml index 0f7c367fe9..66872a0714 100644 --- a/apollo-router/tests/integration/fixtures/happy.router.yaml +++ b/apollo-router/tests/integration/fixtures/happy.router.yaml @@ -19,16 +19,6 @@ telemetry: scheduled_delay: 100ms agent: endpoint: default - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/lifecycle.rs b/apollo-router/tests/integration/lifecycle.rs index 71af2dbcf8..48a2703d2e 100644 --- a/apollo-router/tests/integration/lifecycle.rs +++ b/apollo-router/tests/integration/lifecycle.rs @@ -10,6 +10,7 @@ use apollo_router::services::supergraph; use apollo_router::Context; use apollo_router::TestHarness; use async_trait::async_trait; +use axum::handler::HandlerWithoutStateExt; use futures::FutureExt; use schemars::JsonSchema; use serde::Deserialize; @@ -231,12 +232,10 @@ 
async fn test_experimental_notice() { .config( " telemetry: - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true - body: true + exporters: + tracing: + experimental_response_trace_id: + enabled: true ", ) .build() @@ -254,11 +253,7 @@ const TEST_PLUGIN_ORDERING_CONTEXT_KEY: &str = "ordering-trace"; /// #[tokio::test(flavor = "multi_thread")] async fn test_plugin_ordering() { - async fn coprocessor( - request: http::Request, - ) -> Result, BoxError> { - let body = hyper::body::to_bytes(request.into_body()).await?; - let mut json: serde_json::Value = serde_json::from_slice(&body)?; + async fn coprocessor(mut json: axum::Json) -> axum::Json { let stage = json["stage"].as_str().unwrap().to_owned(); json["context"]["entries"] .as_object_mut() @@ -268,20 +263,15 @@ async fn test_plugin_ordering() { .as_array_mut() .unwrap() .push(format!("coprocessor {stage}").into()); - Ok(http::Response::new(hyper::Body::from( - serde_json::to_string(&json)?, - ))) + json } async fn spawn_coprocessor() -> (String, ShutdownOnDrop) { let (tx, rx) = tokio::sync::oneshot::channel::<()>(); let shutdown_on_drop = ShutdownOnDrop(Some(tx)); - let service = hyper::service::make_service_fn(|_| async { - Ok::<_, hyper::Error>(hyper::service::service_fn(coprocessor)) - }); - // Bind to "port 0" to let the kernel choose an available port number. 
- let server = hyper::Server::bind(&([127, 0, 0, 1], 0).into()).serve(service); - let coprocessor_url = format!("http://{}", server.local_addr()); + let listener = tokio::net::TcpListener::bind("127.0.0.1:0").await.unwrap(); + let coprocessor_url = format!("http://{}", listener.local_addr().unwrap()); + let server = axum::serve(listener, coprocessor.into_make_service()); let server = server.with_graceful_shutdown(async { let _ = rx.await; }); diff --git a/apollo-router/tests/integration/snapshots/integration_tests__integration__lifecycle__cli_config_experimental.snap b/apollo-router/tests/integration/snapshots/integration_tests__integration__lifecycle__cli_config_experimental.snap index e7e67e9657..3addb79d26 100644 --- a/apollo-router/tests/integration/snapshots/integration_tests__integration__lifecycle__cli_config_experimental.snap +++ b/apollo-router/tests/integration/snapshots/integration_tests__integration__lifecycle__cli_config_experimental.snap @@ -10,4 +10,3 @@ stdout: List of all experimental configurations with related GitHub discussions: - experimental_response_trace_id: https://github.com/apollographql/router/discussions/2147 - - experimental_when_header: https://github.com/apollographql/router/discussions/1961 diff --git a/apollo-router/tests/integration/supergraph.rs b/apollo-router/tests/integration/supergraph.rs index 07b4c81089..de798ea4bf 100644 --- a/apollo-router/tests/integration/supergraph.rs +++ b/apollo-router/tests/integration/supergraph.rs @@ -1,4 +1,3 @@ -#[cfg(feature = "hyper_header_limits")] use std::collections::HashMap; use serde_json::json; @@ -7,26 +6,6 @@ use tower::BoxError; use crate::integration::common::Query; use crate::integration::IntegrationTest; -#[cfg(not(feature = "hyper_header_limits"))] -#[tokio::test(flavor = "multi_thread")] -async fn test_supergraph_error_http1_max_headers_config() -> Result<(), BoxError> { - let mut router = IntegrationTest::builder() - .config( - r#" - limits: - http1_max_request_headers: 100 - 
"#, - ) - .build() - .await; - - router.start().await; - router.assert_log_contains("'limits.http1_max_request_headers' requires 'hyper_header_limits' feature: enable 'hyper_header_limits' feature in order to use 'limits.http1_max_request_headers'").await; - router.assert_not_started().await; - Ok(()) -} - -#[cfg(feature = "hyper_header_limits")] #[tokio::test(flavor = "multi_thread")] async fn test_supergraph_errors_on_http1_max_headers() -> Result<(), BoxError> { let mut router = IntegrationTest::builder() @@ -59,7 +38,6 @@ async fn test_supergraph_errors_on_http1_max_headers() -> Result<(), BoxError> { Ok(()) } -#[cfg(feature = "hyper_header_limits")] #[tokio::test(flavor = "multi_thread")] async fn test_supergraph_allow_to_change_http1_max_headers() -> Result<(), BoxError> { let mut router = IntegrationTest::builder() diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml index 6bd0fad86c..1e54d7c9e1 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger-0.5-sample.router.yaml @@ -15,16 +15,6 @@ telemetry: scheduled_delay: 100ms collector: endpoint: http://127.0.0.1:14268/api/traces - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml index c07050677a..dd3c3acb65 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml +++ 
b/apollo-router/tests/integration/telemetry/fixtures/jaeger-advanced.router.yaml @@ -15,16 +15,6 @@ telemetry: scheduled_delay: 100ms collector: endpoint: http://127.0.0.1:14268/api/traces - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true instrumentation: spans: mode: deprecated diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml index ffa63c772d..600f66486b 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger-no-sample.router.yaml @@ -18,16 +18,6 @@ telemetry: scheduled_delay: 100ms collector: endpoint: http://127.0.0.1:14268/api/traces - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger.connectors.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger.connectors.router.yaml new file mode 100644 index 0000000000..614c28bb1c --- /dev/null +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger.connectors.router.yaml @@ -0,0 +1,45 @@ +telemetry: + instrumentation: + spans: + connector: + attributes: + connector.source.name: true + connector.http.method: true + connector.http.response.status_code: + connector_http_response_status: code + connector.url.template: true + connector.http.response.header.content-type: + connector_http_response_header: "content-type" 
+ exporters: + tracing: + experimental_response_trace_id: + enabled: true + header_name: apollo-custom-trace-id + propagation: + jaeger: true + common: + service_name: router + sampler: always_on + jaeger: + enabled: true + batch_processor: + scheduled_delay: 100ms + collector: + endpoint: http://127.0.0.1:14268/api/traces + +override_subgraph_url: + products: http://localhost:4005 +include_subgraph_errors: + all: true + +supergraph: + listen: "127.0.0.1:50642" +health_check: + enabled: true + listen: "127.0.0.1:50642" +preview_connectors: + subgraphs: + posts: # The name of the subgraph + sources: + routerHealth: # Refers to @source(name: "routerHealth") + override_url: http://127.0.0.1:50642 diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml index 11d6dad4ba..f1256d9bc7 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger.router.yaml @@ -15,16 +15,6 @@ telemetry: scheduled_delay: 100ms collector: endpoint: http://127.0.0.1:14268/api/traces - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 diff --git a/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml index e7c92e3599..6b537e3b39 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/jaeger_decimal_trace_id.router.yaml @@ -16,16 +16,6 @@ telemetry: scheduled_delay: 100ms collector: endpoint: http://127.0.0.1:14268/api/traces - logging: 
- experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 diff --git a/apollo-router/tests/integration/telemetry/fixtures/json.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/json.router.yaml index 8fa0f2a74e..72fcc49403 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/json.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/json.router.yaml @@ -113,9 +113,5 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: json diff --git a/apollo-router/tests/integration/telemetry/fixtures/json.sampler_off.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/json.sampler_off.router.yaml index 3190c14d34..7756b848f4 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/json.sampler_off.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/json.sampler_off.router.yaml @@ -115,9 +115,5 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: json diff --git a/apollo-router/tests/integration/telemetry/fixtures/json.span_attributes.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/json.span_attributes.router.yaml index 324a22780d..4ea4c39548 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/json.span_attributes.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/json.span_attributes.router.yaml @@ -122,10 +122,6 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: json: diff --git 
a/apollo-router/tests/integration/telemetry/fixtures/json.uuid.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/json.uuid.router.yaml index 7b9a97af99..62bd1eba7c 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/json.uuid.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/json.uuid.router.yaml @@ -20,10 +20,6 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: json: diff --git a/apollo-router/tests/integration/telemetry/fixtures/no-telemetry.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/no-telemetry.router.yaml index dd561d6029..1518708634 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/no-telemetry.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/no-telemetry.router.yaml @@ -7,16 +7,6 @@ telemetry: common: service_name: router sampler: always_on - logging: - experimental_when_header: - - name: apollo-router-log-request - value: test - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: custom-header - match: ^foo.* - headers: true override_subgraph_url: products: http://localhost:4005 include_subgraph_errors: diff --git a/apollo-router/tests/integration/telemetry/fixtures/prometheus.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/prometheus.router.yaml index 1ae02a4dff..486e322c2a 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/prometheus.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/prometheus.router.yaml @@ -1,6 +1,18 @@ limits: http_max_request_bytes: 200 telemetry: + instrumentation: + instruments: + default_requirement_level: required + router: + http.server.request.duration: + attributes: + server.port: false + server.address: false + http.response.status_code: + alias: status + error: + error: reason exporters: metrics: 
prometheus: @@ -21,14 +33,6 @@ telemetry: - 4 - 5 - 100 - attributes: - subgraph: - all: - request: - header: - - named: "x-custom-header" - rename: "custom_header" - default: "unknown" headers: all: request: diff --git a/apollo-router/tests/integration/telemetry/fixtures/subgraph_auth.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/subgraph_auth.router.yaml index 48c9964bbf..be71589118 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/subgraph_auth.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/subgraph_auth.router.yaml @@ -19,14 +19,6 @@ telemetry: - 4 - 5 - 100 - attributes: - subgraph: - all: - request: - header: - - named: "x-custom-header" - rename: "custom_header" - default: "unknown" headers: all: request: diff --git a/apollo-router/tests/integration/telemetry/fixtures/text.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/text.router.yaml index ef009e55bd..61850d0fd3 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/text.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/text.router.yaml @@ -112,10 +112,6 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: text: diff --git a/apollo-router/tests/integration/telemetry/fixtures/text.sampler_off.router.yaml b/apollo-router/tests/integration/telemetry/fixtures/text.sampler_off.router.yaml index 57a538efcb..e9c902d37b 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/text.sampler_off.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/text.sampler_off.router.yaml @@ -109,10 +109,6 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: text: diff --git a/apollo-router/tests/integration/telemetry/fixtures/text.uuid.router.yaml 
b/apollo-router/tests/integration/telemetry/fixtures/text.uuid.router.yaml index 13b6084b49..28b93f1d10 100644 --- a/apollo-router/tests/integration/telemetry/fixtures/text.uuid.router.yaml +++ b/apollo-router/tests/integration/telemetry/fixtures/text.uuid.router.yaml @@ -20,10 +20,6 @@ telemetry: agent: endpoint: default logging: - experimental_when_header: - - name: content-type - value: "application/json" - body: true stdout: format: text: diff --git a/apollo-router/tests/integration/telemetry/metrics.rs b/apollo-router/tests/integration/telemetry/metrics.rs index 56a5d6223d..1f73d99e31 100644 --- a/apollo-router/tests/integration/telemetry/metrics.rs +++ b/apollo-router/tests/integration/telemetry/metrics.rs @@ -11,6 +11,10 @@ const SUBGRAPH_AUTH_CONFIG: &str = include_str!("fixtures/subgraph_auth.router.y #[tokio::test(flavor = "multi_thread")] async fn test_metrics_reloading() { + if !graph_os_enabled() { + eprintln!("test skipped"); + return; + } let mut router = IntegrationTest::builder() .config(PROMETHEUS_CONFIG) .build() @@ -58,14 +62,8 @@ async fn test_metrics_reloading() { &metrics, r#"apollo_router_cache_miss_count_total{kind="query planner",storage="memory",otel_scope_name="apollo/router"} 2"#, ); - check_metrics_contains( - &metrics, - r#"apollo_router_http_request_duration_seconds_bucket{status="200",otel_scope_name="apollo/router",le="100"}"#, - ); check_metrics_contains(&metrics, r#"apollo_router_cache_hit_time"#); check_metrics_contains(&metrics, r#"apollo_router_cache_miss_time"#); - check_metrics_contains(&metrics, r#"apollo_router_session_count_total"#); - check_metrics_contains(&metrics, r#"custom_header="test_custom""#); router .assert_metrics_does_not_contain(r#"_total_total{"#) @@ -92,6 +90,10 @@ fn check_metrics_contains(metrics: &str, text: &str) { #[tokio::test(flavor = "multi_thread")] async fn test_subgraph_auth_metrics() { + if !graph_os_enabled() { + eprintln!("test skipped"); + return; + } let mut router = 
IntegrationTest::builder() .config(SUBGRAPH_AUTH_CONFIG) .build() @@ -128,6 +130,10 @@ async fn test_subgraph_auth_metrics() { #[tokio::test(flavor = "multi_thread")] async fn test_metrics_bad_query() { + if !graph_os_enabled() { + eprintln!("test skipped"); + return; + } let mut router = IntegrationTest::builder() .config(SUBGRAPH_AUTH_CONFIG) .build() @@ -144,6 +150,10 @@ async fn test_metrics_bad_query() { #[tokio::test(flavor = "multi_thread")] async fn test_bad_queries() { + if !graph_os_enabled() { + eprintln!("test skipped"); + return; + } let mut router = IntegrationTest::builder() .config(PROMETHEUS_CONFIG) .build() @@ -154,7 +164,7 @@ async fn test_bad_queries() { router.execute_default_query().await; router .assert_metrics_contains( - r#"apollo_router_http_requests_total{status="200",otel_scope_name="apollo/router"}"#, + r#"http_server_request_duration_seconds_count{http_request_method="POST",status="200",otel_scope_name="apollo/router"} 1"#, None, ) .await; @@ -164,7 +174,7 @@ async fn test_bad_queries() { router .assert_metrics_contains( - r#"apollo_router_http_requests_total{error="'content-type' header must be one of: \"application/json\" or \"application/graphql-response+json\"",status="415",otel_scope_name="apollo/router"}"#, + r#"http_server_request_duration_seconds_count{error_type="Unsupported Media Type",http_request_method="POST",status="415",otel_scope_name="apollo/router"} 1"#, None, ) .await; @@ -174,7 +184,7 @@ async fn test_bad_queries() { .await; router .assert_metrics_contains( - r#"apollo_router_http_requests_total{error="Must provide query string",status="400",otel_scope_name="apollo/router"}"#, + r#"http_server_request_duration_seconds_count{error_type="Bad Request",http_request_method="POST",status="400",otel_scope_name="apollo/router"} 1"#, None, ) .await; @@ -184,7 +194,7 @@ async fn test_bad_queries() { .await; router .assert_metrics_contains( - r#"apollo_router_http_requests_total{error="Request body payload too 
large",status="413",otel_scope_name="apollo/router"} 1"#, + r#"http_server_request_duration_seconds_count{error_type="Payload Too Large",http_request_method="POST",status="413",otel_scope_name="apollo/router"} 1"#, None, ) .await; @@ -251,6 +261,10 @@ async fn test_graphql_metrics() { #[tokio::test(flavor = "multi_thread")] async fn test_gauges_on_reload() { + if !graph_os_enabled() { + eprintln!("test skipped"); + return; + } let mut router = IntegrationTest::builder() .config(include_str!("fixtures/no-telemetry.router.yaml")) .build() diff --git a/apollo-router/tests/integration_tests.rs b/apollo-router/tests/integration_tests.rs index 4346ba2f71..30dd22efe5 100644 --- a/apollo-router/tests/integration_tests.rs +++ b/apollo-router/tests/integration_tests.rs @@ -162,7 +162,7 @@ async fn empty_posts_should_not_work() { HeaderValue::from_static(APPLICATION_JSON.essence_str()), ) .method(Method::POST) - .body(hyper::Body::empty()) + .body(axum::body::Body::empty()) .unwrap(); let (router, registry) = setup_router_and_registry(serde_json::json!({})).await; @@ -401,6 +401,142 @@ async fn automated_persisted_queries() { assert_eq!(registry.totals(), expected_service_hits); } +#[tokio::test(flavor = "multi_thread")] +async fn persisted_queries() { + use hyper::header::HeaderValue; + use serde_json::json; + + /// Construct a persisted query request from an ID. 
+ fn pq_request(persisted_query_id: &str) -> router::Request { + supergraph::Request::fake_builder() + .extension( + "persistedQuery", + json!({ + "version": 1, + "sha256Hash": persisted_query_id + }), + ) + .build() + .expect("expecting valid request") + .try_into() + .expect("could not convert supergraph::Request to router::Request") + } + + // set up a PQM with one query + const PERSISTED_QUERY_ID: &str = "GetMyNameID"; + const PERSISTED_QUERY_BODY: &str = "query GetMyName { me { name } }"; + let expected_data = serde_json_bytes::json!({ + "me": { + "name": "Ada Lovelace" + } + }); + let map = [( + FullPersistedQueryOperationId { + operation_id: PERSISTED_QUERY_ID.to_string(), + client_name: None, + }, + PERSISTED_QUERY_BODY.to_string(), + )] + .into_iter() + .collect(); + let (_mock_guard, uplink_config) = mock_pq_uplink(&map).await; + + let config = serde_json::json!({ + "persisted_queries": { + "enabled": true + }, + "apq": { + "enabled": false + } + }); + + let mut config: Configuration = serde_json::from_value(config).unwrap(); + config.uplink = Some(uplink_config); + let (router, registry) = setup_router_and_registry_with_config(config).await.unwrap(); + + // Successfully run a persisted query. + let actual = query_with_router(router.clone(), pq_request(PERSISTED_QUERY_ID)).await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data.as_ref(), Some(&expected_data)); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 1}); + + // Error on unpersisted query. 
+ const UNKNOWN_QUERY_ID: &str = "unknown_query"; + const UNPERSISTED_QUERY_BODY: &str = "query GetYourName { you: me { name } }"; + let expected_data = serde_json_bytes::json!({ + "you": { + "name": "Ada Lovelace" + } + }); + let actual = query_with_router(router.clone(), pq_request(UNKNOWN_QUERY_ID)).await; + assert_eq!( + actual.errors, + vec![apollo_router::graphql::Error::builder() + .message(format!( + "Persisted query '{UNKNOWN_QUERY_ID}' not found in the persisted query list" + )) + .extension_code("PERSISTED_QUERY_NOT_IN_LIST") + .build()] + ); + assert_eq!(actual.data, None); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 1}); + + // We didn't break normal GETs. + let actual = query_with_router( + router.clone(), + supergraph::Request::fake_builder() + .query(UNPERSISTED_QUERY_BODY) + .method(Method::GET) + .build() + .unwrap() + .try_into() + .unwrap(), + ) + .await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data.as_ref(), Some(&expected_data)); + assert_eq!(registry.totals(), hashmap! {"accounts".to_string() => 2}); + + // We didn't break normal POSTs. + let actual = query_with_router( + router.clone(), + supergraph::Request::fake_builder() + .query(UNPERSISTED_QUERY_BODY) + .method(Method::POST) + .build() + .unwrap() + .try_into() + .unwrap(), + ) + .await; + assert!(actual.errors.is_empty()); + assert_eq!(actual.data, Some(expected_data)); + assert_eq!(registry.totals(), hashmap! 
{"accounts".to_string() => 3}); + + // Proper error when sending malformed request body + let actual = query_with_router( + router.clone(), + http::Request::builder() + .uri("http://default") + .method(Method::POST) + .header( + CONTENT_TYPE, + HeaderValue::from_static(APPLICATION_JSON.essence_str()), + ) + .body(axum::body::Body::empty()) + .unwrap() + .into(), + ) + .await; + assert_eq!(actual.errors.len(), 1); + + assert_eq!(actual.errors[0].message, "Invalid GraphQL request"); + assert_eq!( + actual.errors[0].extensions["code"], + "INVALID_GRAPHQL_REQUEST" + ); +} + #[tokio::test(flavor = "multi_thread")] async fn missing_variables() { let request = supergraph::Request::fake_builder() diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/README.md b/apollo-router/tests/samples/enterprise/connectors-debugging/README.md new file mode 100644 index 0000000000..76bc6d14bd --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-debugging/README.md @@ -0,0 +1 @@ +Example testing the connectors debugging extensions feature \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/configuration.yaml b/apollo-router/tests/samples/enterprise/connectors-debugging/configuration.yaml new file mode 100644 index 0000000000..18a47e6c98 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-debugging/configuration.yaml @@ -0,0 +1,16 @@ +include_subgraph_errors: + all: true + +preview_connectors: + debug_extensions: true + subgraphs: + connectors: + sources: + jsonPlaceholder: + override_url: http://localhost:4007 + +telemetry: + exporters: + logging: + stdout: + format: text \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/http_snapshots.json b/apollo-router/tests/samples/enterprise/connectors-debugging/http_snapshots.json new file mode 100644 index 0000000000..4b0d82a263 --- /dev/null +++ 
b/apollo-router/tests/samples/enterprise/connectors-debugging/http_snapshots.json @@ -0,0 +1,65 @@ +[ + { + "request": { + "method": "GET", + "path": "posts/1", + "body": null, + "headers": { + "x-test-from": "client-value" + } + }, + "response": { + "status": 200, + "headers": { + "content-type": ["application/json; charset=utf-8"], + "date": ["Tue, 07 Jan 2025 18:34:52 GMT"] + }, + "body": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + }, + { + "request": { + "method": "GET", + "path": "users/1", + "body": null, + "headers": { + "x-test-from": "client-value" + } + }, + "response": { + "status": 200, + "headers": { + "content-type": ["application/json; charset=utf-8"], + "date": ["Tue, 07 Jan 2025 18:34:52 GMT"] + }, + "body": { + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 
556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { + "lat": "-37.3159", + "lng": "81.1496" + } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + } + } + } +] diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/plan.json b/apollo-router/tests/samples/enterprise/connectors-debugging/plan.json new file mode 100644 index 0000000000..59d793ef4a --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-debugging/plan.json @@ -0,0 +1,135 @@ +{ + "enterprise": true, + "redis": false, + "snapshot": true, + "actions": [ + { + "type": "Start", + "schema_path": "./supergraph.graphql", + "configuration_path": "./configuration.yaml", + "subgraphs": { + "jsonPlaceholder": { + "snapshot": { + "path": "./http_snapshots.json", + "base_url": "https://jsonplaceholder.typicode.com/", + "port": 59714 + } + } + } + }, + { + "type": "Request", + "headers": { + "Apollo-Connectors-Debugging": "true", + "x-test-from": "client-value" + }, + "request": { + "query": "query { post(id: 1) { id author { name } title } }" + }, + "expected_response": { + "data": { + "post": { + "id": 1, + "author": { "name": "Leanne Graham" }, + "title": null + } + }, + "extensions": { + "apolloConnectorsDebugging": { + "version": "1", + "data": [ + { + "request": { + "url": "http://127.0.0.1:59714/posts/1", + "method": "GET", + "headers": [["x-from", "client-value"]], + "body": null + }, + "response": { + "status": 200, + "headers": [ + ["content-type", "application/json; charset=utf-8"], + ["date", "Tue, 07 Jan 2025 18:34:52 GMT"], + ["content-length", "275"] + ], + "body": { + "kind": "json", + "content": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et 
cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + }, + "selection": { + "source": "id\ntitle: postTitle\nbody\nauthor: {\n id: userId\n}", + "transformed": "id\ntitle: postTitle\nauthor: {\n __typename: $->echo(\"User\")\n id: userId\n}", + "result": { + "id": 1, + "author": { "__typename": "User", "id": 1 } + }, + "errors": [ + { + "message": "Property .postTitle not found in object", + "path": "postTitle", + "count": 1 + } + ] + } + } + } + }, + { + "request": { + "url": "http://127.0.0.1:59714/users/1", + "method": "GET", + "headers": [["x-from", "client-value"]], + "body": null + }, + "response": { + "status": 200, + "headers": [ + ["content-type", "application/json; charset=utf-8"], + ["date", "Tue, 07 Jan 2025 18:34:52 GMT"], + ["content-length", "401"] + ], + "body": { + "kind": "json", + "content": { + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 
556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { "lat": "-37.3159", "lng": "81.1496" } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + }, + "selection": { + "source": "id\nname\nusername", + "transformed": "name", + "result": { "name": "Leanne Graham" }, + "errors": [] + } + } + } + } + ] + } + } + } + }, + { + "type": "Stop" + } + ] +} diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.graphql b/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.graphql new file mode 100644 index 0000000000..6151a9abce --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.graphql @@ -0,0 +1,82 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "jsonPlaceholder", http: {baseURL: "https://jsonplaceholder.typicode.com/", headers: [{name: "x-from", from: "x-test-from"}]}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) 
repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "http://none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. + """ + EXECUTION +} + +type Post + @join__type(graph: CONNECTORS) +{ + id: ID! 
+ body: String + title: String + author: User +} + +type Query + @join__type(graph: CONNECTORS) +{ + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts"}, selection: "id title body author: { id: userId }"}) + post(id: ID!): Post @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts/{$args.id}"}, selection: "id title: postTitle body author: { id: userId }", entity: true}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$args.id}"}, selection: "id name username", entity: true}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! + name: String + username: String + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$this.id}/posts"}, selection: "id title body"}) +} diff --git a/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.yaml b/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.yaml new file mode 100644 index 0000000000..705a6fa8a0 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.yaml @@ -0,0 +1,37 @@ +# rover supergraph compose --config apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.yaml > apollo-router/tests/samples/enterprise/connectors-debugging/supergraph.graphql +federation_version: =2.10.0-preview.3 +subgraphs: + connectors: # required for snapshot overrides + routing_url: http://none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source( + name: "jsonPlaceholder" + http: { + baseURL: "https://jsonplaceholder.typicode.com/" + headers: [{ name: "x-from", from: "x-test-from" }] + } + ) + + type Post { + id: 
ID! + body: String + title: String + author: User + } + + type Query { + posts: [Post] @connect(source: "jsonPlaceholder", http: {GET: "/posts"}, selection: "id title body author: { id: userId }") + post(id: ID!): Post @connect(source: "jsonPlaceholder", http: {GET: "/posts/{$$args.id}"}, selection: "id title: postTitle body author: { id: userId }", entity: true) + user(id: ID!): User @connect(source: "jsonPlaceholder", http: {GET: "/users/{$$args.id}"}, selection: "id name username", entity: true) + } + + type User { + id: ID! + name: String + username: String + posts: [Post] @connect(source: "jsonPlaceholder", http: {GET: "/users/{$$this.id}/posts"}, selection: "id title body") + } \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/README.md b/apollo-router/tests/samples/enterprise/connectors-defer/README.md new file mode 100644 index 0000000000..9326529386 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/README.md @@ -0,0 +1 @@ +This tests using defer and connectors. It uses a mutation because there was an expansion bug with mutation root type definitions that appeared only when using defer. 
\ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/configuration.yaml b/apollo-router/tests/samples/enterprise/connectors-defer/configuration.yaml new file mode 100644 index 0000000000..9b50a0b3f1 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/configuration.yaml @@ -0,0 +1,15 @@ +include_subgraph_errors: + all: true + +preview_connectors: + subgraphs: + connectors: + sources: + jsonPlaceholder: + override_url: http://localhost:4007 + +telemetry: + exporters: + logging: + stdout: + format: text \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/http_snapshots.json b/apollo-router/tests/samples/enterprise/connectors-defer/http_snapshots.json new file mode 100644 index 0000000000..673dbb3096 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/http_snapshots.json @@ -0,0 +1,38 @@ +[ + { + "request": { + "method": "POST", + "path": "/", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": ["application/json; charset=utf-8"] + }, + "body": { + "f": "1", + "entity": { + "id": "2" + } + } + } + }, + { + "request": { + "method": "GET", + "path": "e/2", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": ["application/json; charset=utf-8"] + }, + "body": { + "id": "2", + "f": "3" + } + } + } +] diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/plan.json b/apollo-router/tests/samples/enterprise/connectors-defer/plan.json new file mode 100644 index 0000000000..a4644fa825 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/plan.json @@ -0,0 +1,41 @@ +{ + "enterprise": true, + "redis": false, + "snapshot": true, + "actions": [ + { + "type": "Start", + "schema_path": "./supergraph.graphql", + "configuration_path": "./configuration.yaml", + "subgraphs": { + "test": { + "snapshot": { + "path": "./http_snapshots.json", + "base_url": 
"http://localhost:4007/" + } + } + } + }, + { + "type": "Request", + "headers": { + "Accept": "multipart/mixed;deferSpec=20220824" + }, + "request": { + "query": "mutation { m { f ... @defer { entity { id f } } } }" + }, + "expected_response": [ + { "data": { "m": { "f": "1" } }, "hasNext": true }, + { + "hasNext": false, + "incremental": [ + { "data": { "entity": { "id": "2", "f": "3" } }, "path": ["m"] } + ] + } + ] + }, + { + "type": "Stop" + } + ] +} diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.graphql b/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.graphql new file mode 100644 index 0000000000..f341d69bbe --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.graphql @@ -0,0 +1,83 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + @link(url: "https://specs.apollo.dev/connect/v0.1", for: EXECUTION) + @join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "test", http: {baseURL: "http://localhost:4007/"}}) +{ + query: Query + mutation: Mutation +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) 
repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +type E + @join__type(graph: CONNECTORS, key: "id") +{ + id: ID! + f: ID +} + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "http://none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type M + @join__type(graph: CONNECTORS) +{ + f: ID + entity: E +} + +type Mutation + @join__type(graph: CONNECTORS) +{ + m: M @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "test", http: {POST: "/"}, selection: "f entity { id }"}) +} + +type Query + @join__type(graph: CONNECTORS) +{ + e(id: ID!): E @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "test", http: {GET: "/e/{$args.id}"}, selection: "f", entity: true}) +} diff --git a/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.yaml b/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.yaml new file mode 100644 index 0000000000..24fc65a323 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors-defer/supergraph.yaml @@ -0,0 +1,29 @@ +# rover supergraph compose --config apollo-router/tests/samples/enterprise/connectors-defer/supergraph.yaml > apollo-router/tests/samples/enterprise/connectors-defer/supergraph.graphql +federation_version: =2.10.0-preview.3 +subgraphs: + connectors: # required for snapshot overrides + routing_url: http://none + schema: + sdl: | + extend schema + @link(url: "https://specs.apollo.dev/federation/v2.10", import: ["@key"]) + @link(url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]) + @source(name: "test", http: {baseURL: "http://localhost:4007/"}) + + type Query { + e(id: ID!): E @connect(source: "test", http: { GET: "/e/{$$args.id}" }, selection: "f", entity: true) + } + + type Mutation { + m: M @connect(source: "test", http: { POST: "/" }, selection: "f entity { id }") + } + + type M { + f: ID + entity: E + } + + type E @key(fields: "id") { + id: ID! 
+ f: ID + } \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors/README.md b/apollo-router/tests/samples/enterprise/connectors/README.md new file mode 100644 index 0000000000..16f7564531 --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors/README.md @@ -0,0 +1 @@ +Example testing the connectors feature \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors/configuration.yaml b/apollo-router/tests/samples/enterprise/connectors/configuration.yaml new file mode 100644 index 0000000000..b6c01d5a9b --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors/configuration.yaml @@ -0,0 +1,17 @@ +override_subgraph_url: + connectors: http://localhost:4005 +include_subgraph_errors: + all: true + +preview_connectors: + subgraphs: + connectors: + sources: + jsonPlaceholder: + override_url: http://localhost:4007 + +telemetry: + exporters: + logging: + stdout: + format: text \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors/http_snapshots.json b/apollo-router/tests/samples/enterprise/connectors/http_snapshots.json new file mode 100644 index 0000000000..2373dbdc5b --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors/http_snapshots.json @@ -0,0 +1,61 @@ +[ + { + "request": { + "method": "GET", + "path": "posts/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; charset=utf-8" + ] + }, + "body": { + "userId": 1, + "id": 1, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit", + "body": "quia et suscipit\nsuscipit recusandae consequuntur expedita et cum\nreprehenderit molestiae ut ut quas totam\nnostrum rerum est autem sunt rem eveniet architecto" + } + } + }, + { + "request": { + "method": "GET", + "path": "users/1", + "body": null + }, + "response": { + "status": 200, + "headers": { + "content-type": [ + "application/json; 
charset=utf-8" + ] + }, + "body": { + "id": 1, + "name": "Leanne Graham", + "username": "Bret", + "email": "Sincere@april.biz", + "address": { + "street": "Kulas Light", + "suite": "Apt. 556", + "city": "Gwenborough", + "zipcode": "92998-3874", + "geo": { + "lat": "-37.3159", + "lng": "81.1496" + } + }, + "phone": "1-770-736-8031 x56442", + "website": "hildegard.org", + "company": { + "name": "Romaguera-Crona", + "catchPhrase": "Multi-layered client-server neural-net", + "bs": "harness real-time e-markets" + } + } + } + } +] \ No newline at end of file diff --git a/apollo-router/tests/samples/enterprise/connectors/plan.json b/apollo-router/tests/samples/enterprise/connectors/plan.json new file mode 100644 index 0000000000..9bd3eda34b --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors/plan.json @@ -0,0 +1,40 @@ +{ + "enterprise": true, + "redis": false, + "snapshot": true, + "actions": [ + { + "type": "Start", + "schema_path": "./supergraph.graphql", + "configuration_path": "./configuration.yaml", + "subgraphs": { + "jsonPlaceholder": { + "snapshot": { + "path": "./http_snapshots.json", + "base_url": "https://jsonplaceholder.typicode.com/" + } + } + } + }, + { + "type": "Request", + "request": { + "query": "query { post(id: 1) { id author { name } title } }" + }, + "expected_response": { + "data": { + "post": { + "id": 1, + "author": { + "name": "Leanne Graham" + }, + "title": "sunt aut facere repellat provident occaecati excepturi optio reprehenderit" + } + } + } + }, + { + "type": "Stop" + } + ] +} diff --git a/apollo-router/tests/samples/enterprise/connectors/supergraph.graphql b/apollo-router/tests/samples/enterprise/connectors/supergraph.graphql new file mode 100644 index 0000000000..ae088619ce --- /dev/null +++ b/apollo-router/tests/samples/enterprise/connectors/supergraph.graphql @@ -0,0 +1,81 @@ +schema + @link(url: "https://specs.apollo.dev/link/v1.0") + @link(url: "https://specs.apollo.dev/join/v0.5", for: EXECUTION) + 
@join__directive(graphs: [CONNECTORS], name: "link", args: {url: "https://specs.apollo.dev/connect/v0.1", import: ["@connect", "@source"]}) + @join__directive(graphs: [CONNECTORS], name: "source", args: {name: "jsonPlaceholder", http: {baseURL: "https://jsonplaceholder.typicode.com/"}}) +{ + query: Query +} + +directive @join__directive(graphs: [join__Graph!], name: String!, args: join__DirectiveArguments) repeatable on SCHEMA | OBJECT | INTERFACE | FIELD_DEFINITION + +directive @join__enumValue(graph: join__Graph!) repeatable on ENUM_VALUE + +directive @join__field(graph: join__Graph, requires: join__FieldSet, provides: join__FieldSet, type: String, external: Boolean, override: String, usedOverridden: Boolean, overrideLabel: String, contextArguments: [join__ContextArgument!]) repeatable on FIELD_DEFINITION | INPUT_FIELD_DEFINITION + +directive @join__graph(name: String!, url: String!) on ENUM_VALUE + +directive @join__implements(graph: join__Graph!, interface: String!) repeatable on OBJECT | INTERFACE + +directive @join__type(graph: join__Graph!, key: join__FieldSet, extension: Boolean! = false, resolvable: Boolean! = true, isInterfaceObject: Boolean! = false) repeatable on OBJECT | INTERFACE | UNION | ENUM | INPUT_OBJECT | SCALAR + +directive @join__unionMember(graph: join__Graph!, member: String!) repeatable on UNION + +directive @link(url: String, as: String, for: link__Purpose, import: [link__Import]) repeatable on SCHEMA + +input join__ContextArgument { + name: String! + type: String! + context: String! + selection: join__FieldValue! +} + +scalar join__DirectiveArguments + +scalar join__FieldSet + +scalar join__FieldValue + +enum join__Graph { + CONNECTORS @join__graph(name: "connectors", url: "none") +} + +scalar link__Import + +enum link__Purpose { + """ + `SECURITY` features provide metadata necessary to securely resolve fields. + """ + SECURITY + + """ + `EXECUTION` features provide metadata necessary for operation execution. 
+ """ + EXECUTION +} + +type Post + @join__type(graph: CONNECTORS) +{ + id: ID! + body: String + title: String + author: User +} + +type Query + @join__type(graph: CONNECTORS) +{ + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts"}, selection: "id\ntitle\nbody\nauthor: { id: userId }"}) + post(id: ID!): Post @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/posts/{$args.id}"}, selection: "id\ntitle\nbody\nauthor: { id: userId }", entity: true}) + user(id: ID!): User @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$args.id}"}, selection: "id\nname\nusername", entity: true}) +} + +type User + @join__type(graph: CONNECTORS) +{ + id: ID! + name: String + username: String + posts: [Post] @join__directive(graphs: [CONNECTORS], name: "connect", args: {source: "jsonPlaceholder", http: {GET: "/users/{$this.id}/posts"}, selection: "id\ntitle\nbody"}) +} \ No newline at end of file diff --git a/apollo-router/tests/samples_tests.rs b/apollo-router/tests/samples_tests.rs index 5beba9d4b5..875daf61fd 100644 --- a/apollo-router/tests/samples_tests.rs +++ b/apollo-router/tests/samples_tests.rs @@ -125,6 +125,11 @@ fn lookup_dir( continue; } + #[cfg(not(feature = "snapshot"))] + if plan.snapshot { + continue; + } + tests.push(Trial::test(name, move || test(&path, plan))); } else { lookup_dir(&path, &name, tests)?; @@ -199,7 +204,7 @@ impl TestExecution { subgraphs, update_url_overrides, } => { - self.reload_subgraphs(subgraphs, *update_url_overrides, out) + self.reload_subgraphs(subgraphs, *update_url_overrides, path, out) .await } Action::Request { @@ -240,8 +245,10 @@ impl TestExecution { self.subgraphs = subgraphs.clone(); let (mut subgraphs_server, url) = self.start_subgraphs(out).await; - let subgraph_overrides = self.load_subgraph_mocks(&mut subgraphs_server, &url).await; - writeln!(out, 
"got subgraph mocks: {subgraph_overrides:?}").unwrap(); + let subgraph_overrides = self + .load_subgraph_mocks(&mut subgraphs_server, &url, path, out) + .await; + writeln!(out, "got subgraph mocks: {subgraph_overrides:?}")?; let config = open_file(&path.join(configuration_path), out)?; let schema_path = path.join(schema_path); @@ -293,7 +300,7 @@ impl TestExecution { let subgraph_url = Self::subgraph_url(&subgraphs_server); let subgraph_overrides = self - .load_subgraph_mocks(&mut subgraphs_server, &subgraph_url) + .load_subgraph_mocks(&mut subgraphs_server, &subgraph_url, path, out) .await; let config = open_file(&path.join(configuration_path), out)?; @@ -342,33 +349,74 @@ impl TestExecution { &mut self, subgraphs_server: &mut MockServer, url: &str, + #[cfg_attr(not(feature = "snapshot"), allow(unused_variables))] path: &Path, + #[cfg_attr(not(feature = "snapshot"), allow(unused_variables, clippy::ptr_arg))] + out: &mut String, ) -> HashMap { let mut subgraph_overrides = HashMap::new(); + #[cfg_attr(not(feature = "snapshot"), allow(unused_variables))] for (name, subgraph) in &self.subgraphs { - for SubgraphRequestMock { request, response } in &subgraph.requests { - let mut builder = Mock::given(body_partial_json(&request.body)); - - if let Some(s) = request.method.as_deref() { - builder = builder.and(method(s)); + if let Some(snapshot) = subgraph.snapshot.as_ref() { + #[cfg(feature = "snapshot")] + { + use std::str::FromStr; + + use http::header::CONTENT_LENGTH; + use http::header::CONTENT_TYPE; + use http::Uri; + + let snapshot_server = apollo_router::SnapshotServer::spawn( + &path.join(&snapshot.path), + Uri::from_str(&snapshot.base_url).unwrap(), + true, + snapshot.update.unwrap_or(false), + Some(vec![CONTENT_TYPE.to_string(), CONTENT_LENGTH.to_string()]), + snapshot.port, + ) + .await; + let snapshot_url = snapshot_server.uri(); + writeln!( + out, + "snapshot server for {name} listening on {snapshot_url}" + ) + .unwrap(); + subgraph_overrides + 
.entry(name.to_string()) + .or_insert(snapshot_url.clone()); } + #[cfg(not(feature = "snapshot"))] + panic!("Tests using the snapshot feature must have `snapshot` set to `true`") + } else { + for SubgraphRequestMock { request, response } in + subgraph.requests.as_ref().unwrap_or(&vec![]) + { + let mut builder = match &request.body { + Some(body) => Mock::given(body_partial_json(body)), + None => Mock::given(wiremock::matchers::AnyMatcher), + }; - if let Some(s) = request.path.as_deref() { - builder = builder.and(wiremock::matchers::path(s)); - } + if let Some(s) = request.method.as_deref() { + builder = builder.and(method(s)); + } - for (header_name, header_value) in &request.headers { - builder = builder.and(header(header_name.as_str(), header_value.as_str())); - } + if let Some(s) = request.path.as_deref() { + builder = builder.and(wiremock::matchers::path(s)); + } - let mut res = ResponseTemplate::new(response.status.unwrap_or(200)); - for (header_name, header_value) in &response.headers { - res = res.append_header(header_name.as_str(), header_value.as_str()); + for (header_name, header_value) in &request.headers { + builder = builder.and(header(header_name.as_str(), header_value.as_str())); + } + + let mut res = ResponseTemplate::new(response.status.unwrap_or(200)); + for (header_name, header_value) in &response.headers { + res = res.append_header(header_name.as_str(), header_value.as_str()); + } + builder + .respond_with(res.set_body_json(&response.body)) + .mount(subgraphs_server) + .await; } - builder - .respond_with(res.set_body_json(&response.body)) - .mount(subgraphs_server) - .await; } // Add a default override for products, if not specified @@ -384,6 +432,7 @@ impl TestExecution { &mut self, subgraphs: &HashMap, update_url_overrides: bool, + path: &Path, out: &mut String, ) -> Result<(), Failed> { writeln!(out, "reloading subgraphs with: {subgraphs:?}").unwrap(); @@ -398,7 +447,7 @@ impl TestExecution { let subgraph_url = 
Self::subgraph_url(&subgraphs_server); let subgraph_overrides = self - .load_subgraph_mocks(&mut subgraphs_server, &subgraph_url) + .load_subgraph_mocks(&mut subgraphs_server, &subgraph_url, path, out) .await; self.subgraphs_server = Some(subgraphs_server); @@ -715,6 +764,8 @@ struct Plan { enterprise: bool, #[serde(default)] redis: bool, + #[serde(default)] + snapshot: bool, actions: Vec, } @@ -755,10 +806,20 @@ enum Action { Stop, } +#[derive(Clone, Debug, Deserialize)] +#[cfg_attr(not(feature = "snapshot"), allow(dead_code))] +struct Snapshot { + path: String, + base_url: String, + update: Option, + port: Option, +} + #[derive(Clone, Debug, Deserialize)] #[serde(deny_unknown_fields)] struct Subgraph { - requests: Vec, + snapshot: Option, + requests: Option>, } #[derive(Clone, Debug, Deserialize)] @@ -775,7 +836,7 @@ struct HttpRequest { path: Option, #[serde(default)] headers: HashMap, - body: Value, + body: Option, } #[derive(Clone, Debug, Deserialize)] diff --git a/apollo-router/tests/snapshots/apollo_reports__batch_send_header-2.snap b/apollo-router/tests/snapshots/apollo_reports__batch_send_header-2.snap index c0b4f2c27d..52a4df2b88 100644 --- a/apollo-router/tests/snapshots/apollo_reports__batch_send_header-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__batch_send_header-2.snap @@ -1221,4 +1221,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__batch_send_header.snap b/apollo-router/tests/snapshots/apollo_reports__batch_send_header.snap index c0b4f2c27d..52a4df2b88 100644 --- a/apollo-router/tests/snapshots/apollo_reports__batch_send_header.snap +++ b/apollo-router/tests/snapshots/apollo_reports__batch_send_header.snap @@ -1221,4 +1221,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: 
false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__batch_trace_id-2.snap b/apollo-router/tests/snapshots/apollo_reports__batch_trace_id-2.snap index 93d9f40183..e9696d86ca 100644 --- a/apollo-router/tests/snapshots/apollo_reports__batch_trace_id-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__batch_trace_id-2.snap @@ -1215,4 +1215,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__batch_trace_id.snap b/apollo-router/tests/snapshots/apollo_reports__batch_trace_id.snap index 93d9f40183..e9696d86ca 100644 --- a/apollo-router/tests/snapshots/apollo_reports__batch_trace_id.snap +++ b/apollo-router/tests/snapshots/apollo_reports__batch_trace_id.snap @@ -1215,4 +1215,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__client_name-2.snap b/apollo-router/tests/snapshots/apollo_reports__client_name-2.snap index 2e10210b7e..93e4e10d59 100644 --- a/apollo-router/tests/snapshots/apollo_reports__client_name-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__client_name-2.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__client_name.snap b/apollo-router/tests/snapshots/apollo_reports__client_name.snap index 2e10210b7e..93e4e10d59 100644 --- a/apollo-router/tests/snapshots/apollo_reports__client_name.snap +++ b/apollo-router/tests/snapshots/apollo_reports__client_name.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: 
[] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__client_version-2.snap b/apollo-router/tests/snapshots/apollo_reports__client_version-2.snap index c87e5b9814..8b6af45d4a 100644 --- a/apollo-router/tests/snapshots/apollo_reports__client_version-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__client_version-2.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__client_version.snap b/apollo-router/tests/snapshots/apollo_reports__client_version.snap index c87e5b9814..8b6af45d4a 100644 --- a/apollo-router/tests/snapshots/apollo_reports__client_version.snap +++ b/apollo-router/tests/snapshots/apollo_reports__client_version.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__condition_else-2.snap b/apollo-router/tests/snapshots/apollo_reports__condition_else-2.snap index 8fd3f3243e..22de9282b5 100644 --- a/apollo-router/tests/snapshots/apollo_reports__condition_else-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__condition_else-2.snap @@ -621,4 +621,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__condition_else.snap b/apollo-router/tests/snapshots/apollo_reports__condition_else.snap index 8fd3f3243e..22de9282b5 100644 --- a/apollo-router/tests/snapshots/apollo_reports__condition_else.snap +++ b/apollo-router/tests/snapshots/apollo_reports__condition_else.snap @@ 
-621,4 +621,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__condition_if-2.snap b/apollo-router/tests/snapshots/apollo_reports__condition_if-2.snap index 282dbbb14d..cbf2c723c4 100644 --- a/apollo-router/tests/snapshots/apollo_reports__condition_if-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__condition_if-2.snap @@ -634,4 +634,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__condition_if.snap b/apollo-router/tests/snapshots/apollo_reports__condition_if.snap index 282dbbb14d..cbf2c723c4 100644 --- a/apollo-router/tests/snapshots/apollo_reports__condition_if.snap +++ b/apollo-router/tests/snapshots/apollo_reports__condition_if.snap @@ -634,4 +634,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__demand_control_stats.snap b/apollo-router/tests/snapshots/apollo_reports__demand_control_stats.snap index 25304f10d9..6a5d9b6932 100644 --- a/apollo-router/tests/snapshots/apollo_reports__demand_control_stats.snap +++ b/apollo-router/tests/snapshots/apollo_reports__demand_control_stats.snap @@ -233,4 +233,4 @@ operation_count_by_type: subtype: "" operation_count: 1 traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace-2.snap b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace-2.snap index 20611f5035..06a8190dd8 100644 --- 
a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace-2.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace.snap b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace.snap index 20611f5035..06a8190dd8 100644 --- a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace.snap +++ b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched-2.snap b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched-2.snap index 8513f44223..92cddf56c0 100644 --- a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched-2.snap @@ -1215,4 +1215,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched.snap b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched.snap index 8513f44223..92cddf56c0 100644 --- a/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched.snap +++ b/apollo-router/tests/snapshots/apollo_reports__demand_control_trace_batched.snap @@ -1215,4 +1215,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: 
false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__new_field_stats.snap b/apollo-router/tests/snapshots/apollo_reports__new_field_stats.snap index 805a2d472e..7755592e44 100644 --- a/apollo-router/tests/snapshots/apollo_reports__new_field_stats.snap +++ b/apollo-router/tests/snapshots/apollo_reports__new_field_stats.snap @@ -251,4 +251,4 @@ operation_count_by_type: subtype: "" operation_count: 1 traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__non_defer-2.snap b/apollo-router/tests/snapshots/apollo_reports__non_defer-2.snap index 4cfbf4acc1..7276773806 100644 --- a/apollo-router/tests/snapshots/apollo_reports__non_defer-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__non_defer-2.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__non_defer.snap b/apollo-router/tests/snapshots/apollo_reports__non_defer.snap index 4cfbf4acc1..7276773806 100644 --- a/apollo-router/tests/snapshots/apollo_reports__non_defer.snap +++ b/apollo-router/tests/snapshots/apollo_reports__non_defer.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__send_header-2.snap b/apollo-router/tests/snapshots/apollo_reports__send_header-2.snap index d08aba4499..a9985e9cdf 100644 --- a/apollo-router/tests/snapshots/apollo_reports__send_header-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__send_header-2.snap @@ -618,4 +618,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true 
-extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__send_header.snap b/apollo-router/tests/snapshots/apollo_reports__send_header.snap index d08aba4499..a9985e9cdf 100644 --- a/apollo-router/tests/snapshots/apollo_reports__send_header.snap +++ b/apollo-router/tests/snapshots/apollo_reports__send_header.snap @@ -618,4 +618,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__send_variable_value-2.snap b/apollo-router/tests/snapshots/apollo_reports__send_variable_value-2.snap index e10815fe3b..fd18b7b812 100644 --- a/apollo-router/tests/snapshots/apollo_reports__send_variable_value-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__send_variable_value-2.snap @@ -617,4 +617,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__send_variable_value.snap b/apollo-router/tests/snapshots/apollo_reports__send_variable_value.snap index e10815fe3b..fd18b7b812 100644 --- a/apollo-router/tests/snapshots/apollo_reports__send_variable_value.snap +++ b/apollo-router/tests/snapshots/apollo_reports__send_variable_value.snap @@ -617,4 +617,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__stats.snap b/apollo-router/tests/snapshots/apollo_reports__stats.snap index a2b3c0eb79..8dd5a20997 100644 --- a/apollo-router/tests/snapshots/apollo_reports__stats.snap +++ b/apollo-router/tests/snapshots/apollo_reports__stats.snap @@ -143,4 +143,4 @@ operation_count_by_type: subtype: 
"" operation_count: 1 traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__trace_id-2.snap b/apollo-router/tests/snapshots/apollo_reports__trace_id-2.snap index 4cfbf4acc1..7276773806 100644 --- a/apollo-router/tests/snapshots/apollo_reports__trace_id-2.snap +++ b/apollo-router/tests/snapshots/apollo_reports__trace_id-2.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/apollo-router/tests/snapshots/apollo_reports__trace_id.snap b/apollo-router/tests/snapshots/apollo_reports__trace_id.snap index 4cfbf4acc1..7276773806 100644 --- a/apollo-router/tests/snapshots/apollo_reports__trace_id.snap +++ b/apollo-router/tests/snapshots/apollo_reports__trace_id.snap @@ -615,4 +615,4 @@ end_time: "[end_time]" operation_count: 0 operation_count_by_type: [] traces_pre_aggregated: true -extended_references_enabled: false +extended_references_enabled: true diff --git a/deny.toml b/deny.toml index 8e79104c33..651acc0254 100644 --- a/deny.toml +++ b/deny.toml @@ -54,6 +54,7 @@ allow = [ "MIT", "MPL-2.0", "Elastic-2.0", + "OpenSSL", # required by aws-lc-sys "Unicode-DFS-2016", "Unicode-3.0", "Zlib" @@ -95,25 +96,7 @@ highlight = "all" # List of crates to deny deny = [ - # `cargo-scaffold` uses `git2` which uses `libssh2-sys` and `libgit2-sys`. - # Both require `openssl-sys`. Adding this rule in this way is sufficient to - # allow those to use `openssl-sys` (not a runtime dependency), leveraging the - # capabilities of `cargo-deny` to "block" `openssl-sys`. However, this isn't - # defensive enough on its own since we could introduce `git2` in - # `apollo-router` and we would inadvertently get `openssl-sys` and it would - # _not_ be blocked. That's bad! 
Unfortunately, the `wrappers` technique of - # `cargo-deny` only enables exceptions for _direct_ dependencies. To defend - # against the above risk, we add additional rules here (below) which _only_ - # allows `git2` in `cargo-scaffold`. This is a bit wonky, since we may at - # some point want `git2` but does accomplish what we want with the desired - # exception. - { name = "openssl-sys", wrappers = ["git2", "libssh2-sys", "libgit2-sys"] }, - # Note! This line is required to support the above exception. - { name = "git2", wrappers = ["auth-git2", "cargo-scaffold"] }, - # Note! This line is required to support the above exception. - { name = "libgit2-sys", wrappers = ["git2"] }, - # Note! This line is required to support the above exception. - { name = "libssh2-sys", wrappers = ["libgit2-sys"] }, + { name = "openssl-sys" }, ] # This section is considered when running `cargo deny check sources`. diff --git a/dockerfiles/tracing/docker-compose.datadog.yml b/dockerfiles/tracing/docker-compose.datadog.yml index cad8686c22..069a8bf01c 100644 --- a/dockerfiles/tracing/docker-compose.datadog.yml +++ b/dockerfiles/tracing/docker-compose.datadog.yml @@ -1,9 +1,8 @@ version: "3.9" services: - apollo-router: container_name: apollo-router - image: ghcr.io/apollographql/router:v1.59.1 + image: ghcr.io/apollographql/router:v2.0.0-preview.4 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/datadog.router.yaml:/etc/config/configuration.yaml @@ -14,7 +13,7 @@ services: "-s", "/etc/config/supergraph.graphql", "--log", - "info" + "info", ] ports: - 4000:4000 diff --git a/dockerfiles/tracing/docker-compose.jaeger.yml b/dockerfiles/tracing/docker-compose.jaeger.yml index f84e25a10d..4c09491e24 100644 --- a/dockerfiles/tracing/docker-compose.jaeger.yml +++ b/dockerfiles/tracing/docker-compose.jaeger.yml @@ -1,10 +1,9 @@ version: "3.9" services: - apollo-router: container_name: apollo-router #build: ./router - image: ghcr.io/apollographql/router:v1.59.1 + image: 
ghcr.io/apollographql/router:v2.0.0-preview.4 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/jaeger.router.yaml:/etc/config/configuration.yaml @@ -15,7 +14,7 @@ services: "-s", "/etc/config/supergraph.graphql", "--log", - "info" + "info", ] ports: - 4000:4000 diff --git a/dockerfiles/tracing/docker-compose.zipkin.yml b/dockerfiles/tracing/docker-compose.zipkin.yml index cf71996327..aa64bfdf6e 100644 --- a/dockerfiles/tracing/docker-compose.zipkin.yml +++ b/dockerfiles/tracing/docker-compose.zipkin.yml @@ -1,10 +1,9 @@ version: "3.9" services: - apollo-router: container_name: apollo-router build: ./router - image: ghcr.io/apollographql/router:v1.59.1 + image: ghcr.io/apollographql/router:v2.0.0-preview.4 volumes: - ./supergraph.graphql:/etc/config/supergraph.graphql - ./router/zipkin.router.yaml:/etc/config/configuration.yaml @@ -15,7 +14,7 @@ services: "-s", "/etc/config/supergraph.graphql", "--log", - "info" + "info", ] ports: - 4000:4000 diff --git a/docs/source/reference/migration/from-router-v1.mdx b/docs/source/reference/migration/from-router-v1.mdx new file mode 100644 index 0000000000..20049b65e1 --- /dev/null +++ b/docs/source/reference/migration/from-router-v1.mdx @@ -0,0 +1,194 @@ +--- +title: Upgrading from Versions 1.x +subtitle: Upgrade from version 1.x to 2.x of GraphOS Router +description: Learn how to upgrade from version 1.x to 2.x of the GraphOS Router. +--- + +Learn how to upgrade your GraphOS Router deployment from version 1.x to 2.x. + + + +GraphOS Router 2.x is available as a [Developer Preview](https://www.apollographql.com/docs/graphos/reference/router-release-lifecycle#developer-preview). The most recent version is **[v2.0.0-preview.1](https://github.com/apollographql/router/releases/tag/v2.0.0-preview.1)**. This upgrade guide will be updated throughout the Developer Preview and will have additions after each release. 
+ + + +## New configuration in GraphOS Router v2.0.0-preview.x + +The router v2.x introduces some new features that can impact the configuration from v1.x. + +### Apollo operation usage reporting via OTLP + +The router supports reporting operation usage metrics to GraphOS via OpenTelemetry Protocol (OTLP). + +Prior to version 1.49.0 of the router, all GraphOS reporting was performed using a [private tracing format](/graphos/metrics/sending-operation-metrics#reporting-format). In v1.49.0, we introduced support for using OTel to perform this reporting. In v1.x, this is controlled using the `experimental_otlp_tracing_sampler` flag, and it's off by default. + +Now in v2.x, this flag is renamed to `otlp_tracing_sampler`, and it's enabled by default. + +Learn more about configuring [usage reporting via OTLP](/router/configuration/telemetry/apollo-telemetry#usage-reporting-via-opentelemetry-protocol-otlp). + +### Metrics reporting defaults + +Default values of some GraphOS reporting metrics have been changed from v1.x to the following in v2.x: + +- `telemetry.apollo.signature_normalization_algorithm` now defaults to `enhanced`. (In v1.x the default is `legacy`.) +- `telemetry.apollo.metrics_reference_mode` now defaults to `extended`. (In v1.x the default is `standard`.) + +## Removed features in GraphOS Router v2.x + +### Support for Scaffold + + + +In Router v1.x Scaffold could be used to generate boilerplate source code for a Rust Plugin which extended functionality in a custom Router. This facility has been removed. + +Source code generated using Scaffold will continue to compile; so existing Rust plugins will be unaffected by this change. + +### `--apollo-uplink-poll-interval` + + + +The router no longer uses the `--apollo-uplink-poll-interval` command-line argument, or the `APOLLO_UPLINK_POLL_INTERVAL` environment variable. It already did something unusual in v1.x: the configured interval would only be used for the very first poll, and not any subsequent polls. 
+ +## Upgrading to GraphOS Router v2.x + +Upgrading from GraphOS Router v1.x to v2.x requires the following steps. + +### Configure your Apollo usage reporting protocol + +If your router v1.x is configured with `experimental_otlp_tracing_sampler`, rename it to `otlp_tracing_sampler`. + +To [report operation usage metrics to GraphOS via OTLP](#apollo-operation-usage-reporting-via-otlp), you can either remove your configuration of `otlp_tracing_sampler` and use its default value, or you can explicitly enable it by setting it to [`always_on` or a sampling ratio](/router/configuration/telemetry/apollo-telemetry#configuring-usage-reporting-via-otlp). + +Otherwise, to use the behavior of v1.x, turn off usage reporting via OTLP by setting `otlp_tracing_sampler` to `always_off`. + +### Configure metrics reporting defaults + +If you plan to use the [new metrics reporting defaults](#metrics-reporting-defaults), then no changes are required. + +Otherwise, to use the defaults from v1.x, set the following: + +```yaml +telemetry: + apollo: + signature_normalization_algorithm: legacy + metrics_reference_mode: standard +``` + +### Configure supergraph endpoint path + +If you use the default endpoint path or your path has no matched parameters or wildcards, then no changes are required. 
+ +If your path uses named parameters, it is now bound with a braces syntax instead of a colon: + +```yaml +supergraph: + # Previously: + # path: /foo/:bar/baz + path: /foo/{bar}/baz +``` + +If your path uses a wildcard, it must be wrapped in braces and must bind a name: + +```yaml +supergraph: + # Previously: + # path: /foo/* + path: /foo/{*rest} +``` + +### Logging + + + +If you used the `experimental_when_header` feature previously like this for example: + +```yaml title="router.previous.yaml" +telemetry: + exporters: + logging: + # If one of these headers matches we will log supergraph and subgraphs requests/responses + experimental_when_header: # REMOVED + - name: apollo-router-log-request + value: my_client + headers: true # default: false + body: true # default: false +``` + +This feature no longer exists and can be replaced with another one included in custom telemetry. Here is how you would configure custom telemetry to achieve the same result: + +```yaml title="router.yaml" +telemetry: + instrumentation: + events: + router: + request: # Display router request log + level: info + condition: + eq: + - request_header: apollo-router-log-request + - my_client + response: # Display router response log + level: info + condition: + eq: + - request_header: apollo-router-log-request + - my_client + supergraph: + request: # Display supergraph request log + level: info + condition: + eq: + - request_header: apollo-router-log-request + - my_client + response: + level: info + condition: + eq: + - request_header: apollo-router-log-request + - my_client + subgraph: + request: # Display subgraph request log + level: info + condition: + eq: + - supergraph_request_header: apollo-router-log-request + - my_client + response: # Display subgraph response log + level: info + condition: + eq: + - supergraph_request_header: apollo-router-log-request + - my_client +``` + +### Metrics + +- `apollo_router_http_request_retry_total` has been removed and will be supported now by 
`http.client.request.duration` metric + with the `http.request.resend_count` attribute, which is automatically set when `default_requirement_level` is set to `recommended`. + +- `apollo_router_timeout` has been removed. This metric conflated timed-out requests from client to the router, and requests from the router to subgraphs. + Timed-out requests have HTTP status code 504. Use the `http.response.status_code` attribute on the `http.server.request.duration` metric to identify timed-out router requests, + and the same attribute on the `http.client.request.duration` metric to identify timed-out subgraph requests. + +- `apollo_router_http_requests_total` has been removed and replaced by the `http.server.request.duration` metric for requests from clients to router and `http.client.request.duration` for requests from router to subgraphs. + +- `apollo_router_http_request_duration_seconds_bucket` has been removed and replaced by the `http.server.request.duration` metric for requests from clients to router and `http.client.request.duration` for requests from router to subgraphs. + +- `apollo_router_session_count_total` has been removed and replaced by `http.server.active_requests`. + +### Check CORS configuration + +If you already configured [CORS](../../routing/security/cors) on the router, the configuration won't change. However, v1.x accepted invalid configuration (such as invalid header names or regexes) and only displayed an error log. +With v2.x, invalid configuration results in an error and the router won't start at all, to avoid confusion and misconfiguration. + +### Deploy your router + +Make sure that you are referencing the correct router release: **v2.0.0-preview.0**. + +During upgrade, carefully monitor logs and resource consumption to ensure that your router has successfully upgraded and that your router has enough resources to perform as expected. 
The router will automatically migrate required configuration changes, but it is good practice to review the steps above and decide if you want to change your configuration. + +For example, you may decide that you wish to change the way in which you sample OTLP traces in this new release and not simply preserve your existing configuration. + +## Reporting upgrade issues + +If you encounter an upgrade issue that isn't resolved by this article, please search for existing [GitHub discussions](https://github.com/apollographql/router/discussions/) and start a new discussion if you don't find what you're looking for. diff --git a/docs/source/reference/router/configuration.mdx b/docs/source/reference/router/configuration.mdx index 468d138683..50d27a7104 100644 --- a/docs/source/reference/router/configuration.mdx +++ b/docs/source/reference/router/configuration.mdx @@ -235,22 +235,6 @@ If you specify multiple URLs, separate them with commas (no whitespace). For default behavior and possible values, see [Apollo Uplink](/federation/managed-federation/uplink/). - - - - - -##### `--apollo-uplink-poll-interval` - -`APOLLO_UPLINK_POLL_INTERVAL` - - - - -The amount of time between polls to Apollo Uplink. - -The default value is `10s` (ten seconds), which is also the minimum allowed value. - @@ -478,17 +462,18 @@ The path must start with `/`. A path can contain parameters and wildcards: -- `/:parameter` matches a single segment. For example: - - `/abc/:wildcard/def` matches `/abc/1/def` and `/abc/whatever/def`, but it doesn't match `/abc/1/2/def` or `/abc/def` +- `/{parameter}` matches a single segment. For example: -- `/*parameter` matches all segments in the rest of a path. 
For example: - - `/abc/*wildcard` matches `/abc/1/def` and `/abc/w/h/a/t/e/v/e/r`, but it doesn't match `/abc/` or `/not_abc_at_all` + - `/abc/{my_param}/def` matches `/abc/1/def` and `/abc/whatever/def`, but it doesn't match `/abc/1/2/def` or `/abc/def` + +- `/{*parameter}` matches all segments in the rest of a path. For example: + - `/abc/{*wildcard}` matches `/abc/1/def` and `/abc/w/h/a/t/e/v/e/r`, but it doesn't match `/abc/` or `/not_abc_at_all` -- Both `:` and `*` syntaxes require a name, even though you can’t use those names anywhere. +- Both parameters and wildcards require a name, even though you can’t use those names anywhere. -- The router doesn't support wildcards in the _middle_ of a path (e.g., `/*/graphql`). Instead, use a path parameter (e.g., `/:parameter/graphql`). +- The router doesn't support wildcards in the _middle_ of a path (e.g., `/{*wild}/graphql`). Instead, use a path parameter (e.g., `/{parameter}/graphql`). @@ -1111,14 +1096,6 @@ If router receives more headers than the buffer size, it responds to the client Limit the maximum buffer size for the HTTP1 connection. Default is ~400kib. -Note for Rust Crate Users: If you are using the Router as a Rust crate, the `http1_request_max_buf_size` option requires the `hyper_header_limits` feature and also necessitates using Apollo's fork of the Hyper crate until the [changes are merged upstream](https://github.com/hyperium/hyper/pull/3523). 
-You can include this fork by adding the following patch to your Cargo.toml file: - -```toml -[patch.crates-io] -"hyper" = { git = "https://github.com/apollographql/hyper.git", tag = "header-customizations-20241108" } -``` - #### Parser-based limits ##### `parser_max_tokens` diff --git a/docs/source/reference/router/telemetry/instrumentation/events.mdx b/docs/source/reference/router/telemetry/instrumentation/events.mdx index dd06fd6c26..3d03145f28 100644 --- a/docs/source/reference/router/telemetry/instrumentation/events.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/events.mdx @@ -17,7 +17,7 @@ You can configure events for each service in `router.yaml`. Events can be standa -The `router`, `supergraph` and `subgraph` sections are used to define custom event configuration for each service: +The `router`, `supergraph`, `subgraph` and `connector` sections are used to define custom event configuration for each service: ```yaml title="future.router.yaml" telemetry: @@ -29,6 +29,8 @@ telemetry: # ... subgraph: # highlight-line # ... + connector: # highlight-line + # ... ``` ### Standard events @@ -229,6 +231,8 @@ telemetry: # Custom event configuration for supergraph service ... subgraph: # Custom event configuration for subgraph service ... + connector: + # Custom event configuration for HTTP connectors ... ``` ## Event configuration reference diff --git a/docs/source/reference/router/telemetry/instrumentation/instruments.mdx b/docs/source/reference/router/telemetry/instrumentation/instruments.mdx index aa5bee1100..6a98c41c9f 100644 --- a/docs/source/reference/router/telemetry/instrumentation/instruments.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/instruments.mdx @@ -27,6 +27,12 @@ OpenTelemetry specifies multiple [standard metric instruments](https://opentelem * `http.client.request.duration` - A histogram of request durations for requests handled by subgraphs. 
* `http.client.response.body.size` - A histogram of response body sizes for requests handled by subgraphs. +* For connector HTTP requests: + + * `http.client.request.body.size` - A histogram of request body sizes for connectors HTTP requests. + * `http.client.request.duration` - A histogram of request durations for connectors HTTP requests. + * `http.client.response.body.size` - A histogram of response body sizes for connectors HTTP responses. + The [`default_requirement_level` setting](#default_requirement_level) configures whether or not these instruments are enabled by default. Out of the box, its default value of `required` enables them. You must explicitly configure an instrument for different behavior. @@ -47,6 +53,10 @@ telemetry: http.client.request.body.size: true # (default false) http.client.request.duration: true # (default false) http.client.response.body.size: true # (default false) + connector: + http.client.request.body.size: true # (default false) + http.client.request.duration: true # (default false) + http.client.response.body.size: true # (default false) ``` They can be customized by attaching or removing attributes. See [attributes](#attributes) to learn more about configuring attributes. 
@@ -64,6 +74,10 @@ telemetry: http.client.request.duration: attributes: subgraph.name: true + connector: + http.client.request.duration: + attributes: + connector.source.name: true ``` ### Apollo standard instruments @@ -82,6 +96,7 @@ The example configuration below defines four custom instruments: - `acme.request.duration` on the `router` service - `acme.graphql.requests` on the `supergraph` service - `acme.graphql.subgraph.errors` on the `subgraph` service +- `acme.user.not.found` on a connector HTTP response - `acme.graphql.list.lengths` on each JSON element returned to the client (defined on `graphql`) ```yaml title="router.yaml" @@ -118,6 +133,21 @@ telemetry: unit: count description: "my description" + connector: + acme.user.not.found: + value: unit + type: counter + unit: count + description: "Count of 404 responses from the user API" + condition: + all: + - eq: + - 404 + - connector_http_response_status: code + - eq: + - "user_api" + - connector_source: name + graphql: acme.graphql.list.lengths: value: diff --git a/docs/source/reference/router/telemetry/instrumentation/selectors.mdx b/docs/source/reference/router/telemetry/instrumentation/selectors.mdx index 02a8867e54..aefb405193 100644 --- a/docs/source/reference/router/telemetry/instrumentation/selectors.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/selectors.mdx @@ -23,7 +23,7 @@ telemetry: ## Selector configuration reference -Each service of the router pipeline (`router`, `supergraph`, `subgraph`) has its own available selectors. +Each service of the router pipeline (`router`, `supergraph`, `subgraph`, `connector`) has its own available selectors. You can also extract GraphQL metrics from the response data the router returns to clients. 
### Router @@ -100,6 +100,25 @@ The subgraph service executes multiple times during query execution, with each e | `error` | No | `reason` | A string value containing error reason when it's a critical error | | `cache` | No | `hit` \| `miss` | Returns the number of cache hit or miss for this subgraph request | +### Connector + +#### HTTP + +Apollo Connectors for REST APIs make HTTP calls to the upstream HTTP API. These selectors let you extract metrics from these HTTP requests and responses. + +| Selector | Defaultable | Values | Description | +|----------------------------------|-------------|------------------|-------------------------------------------------------------------| +| `subgraph_name` | No | `true`\|`false` | The name of the subgraph containing the connector | +| `connector_source` | No | `name` | The name of the `@source` associated with this connector, if any | +| `connector_http_request_header` | Yes | | The name of a connector request header | +| `connector_http_response_header` | Yes | | The name of a connector response header | +| `connector_http_response_status` | No | `code`\|`reason` | The status of a connector response | +| `connector_http_method` | No | `true`\|`false` | The HTTP method of a connector request | +| `connector_url_template` | No | `true`\|`false` | The URL template of a connector request | +| `static` | No | | A static string value | +| `error` | No | `reason` | A string value containing error reason when it's a critical error | + + ### GraphQL GraphQL metrics are extracted from the response data the router returns to client requests. 
diff --git a/docs/source/reference/router/telemetry/instrumentation/spans.mdx b/docs/source/reference/router/telemetry/instrumentation/spans.mdx index 70016ebdb0..cae17cf72e 100644 --- a/docs/source/reference/router/telemetry/instrumentation/spans.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/spans.mdx @@ -14,7 +14,7 @@ A **span** captures contextual information about requests and responses as they' -The `router`, `supergraph` and `subgraph` sections are used to define custom span configuration for each service: +The `router`, `supergraph`, `subgraph` and `connector` sections are used to define custom span configuration for each service: ```yaml title="router.yaml" telemetry: @@ -29,6 +29,9 @@ telemetry: subgraph: # highlight-line attributes: {} # ... + connector: # highlight-line + attributes: {} + # ... ``` ### `attributes` @@ -238,6 +241,9 @@ telemetry: subgraph: attributes: {} # ... + connector: + attributes: {} + # ... ``` ## Spans configuration reference diff --git a/docs/source/reference/router/telemetry/instrumentation/standard-attributes.mdx b/docs/source/reference/router/telemetry/instrumentation/standard-attributes.mdx index eb264481e3..bea6b3927f 100644 --- a/docs/source/reference/router/telemetry/instrumentation/standard-attributes.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/standard-attributes.mdx @@ -109,3 +109,14 @@ Standard attributes of the `subgraph` service: | `subgraph.graphql.operation.type` | `query`\|`mutation`\|`subscription` | The operation kind from the subgraph query | | `subgraph.graphql.document` | | The GraphQL query to the subgraph (need `spec_compliant` [mode](/router/configuration/telemetry/instrumentation/spans/#mode) to disable it) | | `http.request.resend_count` | `true`\|`false` | Number of retries for an http request to a subgraph | + +#### Connector + +Standard attributes of the `connector` service: + +| Attribute | Values | Description | 
+|--------------------------|-------------------------------------|------------------------------------------------------------------| +| `subgraph.name` | | The name of the subgraph containing the connector | +| `connector.source.name` | | The name of the `@source` associated with this connector, if any | +| `connector.http.method` | | The HTTP method for the connector (`GET` or `POST`, for example) | +| `connector.url.template` | | The URL template for the connector | diff --git a/docs/source/reference/router/telemetry/instrumentation/standard-instruments.mdx b/docs/source/reference/router/telemetry/instrumentation/standard-instruments.mdx index ec9fc56327..4506f26dbb 100644 --- a/docs/source/reference/router/telemetry/instrumentation/standard-instruments.mdx +++ b/docs/source/reference/router/telemetry/instrumentation/standard-instruments.mdx @@ -115,7 +115,7 @@ The initial call to Uplink during router startup is not reflected in metrics. - `apollo.router.telemetry.studio.reports` - The number of reports submitted to GraphOS Studio by the router. - `report.type`: The type of report submitted: "traces" or "metrics" - - `report.protocol`: Either "apollo" or "otlp", depending on the experimental_otlp_tracing_sampler configuration. + - `report.protocol`: Either "apollo" or "otlp", depending on the otlp_tracing_sampler configuration. 
### Deprecated diff --git a/docs/source/reference/router/telemetry/log-exporters/overview.mdx b/docs/source/reference/router/telemetry/log-exporters/overview.mdx index f1cef8cff1..bbfe532694 100644 --- a/docs/source/reference/router/telemetry/log-exporters/overview.mdx +++ b/docs/source/reference/router/telemetry/log-exporters/overview.mdx @@ -142,32 +142,13 @@ For OpenTelemetry conventions for resources, see [Resource Semantic Conventions] ### Request/Response logging - - - By default, the router _doesn't_ log the following values that might contain sensitive data, even if a sufficient log level is set: - Request bodies - Response bodies - Headers -You can enable selective logging of these values via the `experimental_when_header` option: - -```yaml title="router.yaml" -telemetry: - exporters: - logging: - # If one of these headers matches we will log supergraph and subgraphs requests/responses - experimental_when_header: - - name: apollo-router-log-request - value: my_client - headers: true # default: false - body: true # default: false - # log request for all requests coming from Iphones - - name: user-agent - match: ^Mozilla/5.0 (iPhone* - headers: true -``` +You can enable selective logging of these values using [standard events](../instrumentation/events) with [conditions](../instrumentation/conditions) ## Logging common reference diff --git a/docs/source/reference/router/telemetry/metrics-exporters/overview.mdx b/docs/source/reference/router/telemetry/metrics-exporters/overview.mdx index 5ca04f389b..2aee51423c 100644 --- a/docs/source/reference/router/telemetry/metrics-exporters/overview.mdx +++ b/docs/source/reference/router/telemetry/metrics-exporters/overview.mdx @@ -115,83 +115,6 @@ telemetry: - 20.00 ``` - -### `attributes` - -You can add custom attributes (OpenTelemetry) and labels (Prometheus) to the `apollo_router_http_requests` metric. 
Attributes can be: - -* static values (preferably using a [resource](#resource)) -* headers from the request or response -* a value from a context -* a value from the request or response body ([JSON path](https://goessner.net/articles/JsonPath/)) - - - -Use [resource attributes](#resource) instead to provide information about telemetry resources, including hosts and environments. - - - -An example of configuring these attributes is shown below: - -```yaml title="router.yaml" -telemetry: - exporters: - metrics: - common: - attributes: - supergraph: # Attribute configuration for requests to/responses from the router - static: - - name: "version" - value: "v1.0.0" - request: - header: - - named: "content-type" - rename: "payload_type" - default: "application/json" - - named: "x-custom-header-to-add" - response: - body: - # Apply the value of the provided path of the router's response body as an attribute - - path: .errors[0].extensions.http.status - name: error_from_body - # Use the unique extension code to identify the kind of error - - path: .errors[0].extensions.code - name: error_code - context: - # Apply the indicated element from the plugin chain's context as an attribute - - named: my_key - subgraph: # Attribute configuration for requests to/responses from subgraphs - all: - static: - # Always apply this attribute to all metrics for all subgraphs - - name: kind - value: subgraph_request # each subgraph request updates a metric separately - errors: # Only work if it's a valid GraphQL error (for example if the subgraph returns an http error or if the router can't reach the subgraph) - include_messages: true # Will include the error message in a message attribute - extensions: # Include extensions data - - name: subgraph_error_extended_type # Name of the attribute - path: .type # JSON query path to fetch data from extensions - - name: message - path: .reason - # Will create this kind of metric for example apollo_router_http_requests_error_total{message="cannot 
contact the subgraph",subgraph="my_subgraph_name",subgraph_error_extended_type="SubrequestHttpError"} - subgraphs: - my_subgraph_name: # Apply these rules only for the subgraph named `my_subgraph_name` - request: - header: - - named: "x-custom-header" - body: - # Apply the value of the provided path of the router's request body as an attribute (here it's the query) - - path: .query - name: query - default: UNKNOWN -``` - - - -OpenTelemetry includes many [standard attributes](https://opentelemetry.io/docs/specs/semconv/attributes-registry/) that you can use via custom [instruments](/router/configuration/telemetry/instrumentation/instruments). - - - ### `views` You can override default attributes and default buckets for specific metrics thanks to this configuration. diff --git a/docs/source/routing/customization/custom-binary.mdx b/docs/source/routing/customization/custom-binary.mdx index 1446a57930..5b4cc6c2d8 100644 --- a/docs/source/routing/customization/custom-binary.mdx +++ b/docs/source/routing/customization/custom-binary.mdx @@ -18,46 +18,23 @@ Learn how to compile a custom binary from Apollo Router Core source, which is re To compile the router, you need to have [Rust 1.83.0 or later](https://www.rust-lang.org/tools/install) installed. -After you install the above, also install the `cargo-scaffold` tool: - -```sh -cargo install cargo-scaffold -``` - ## 1. Create a new project -1. Use the `cargo scaffold` command to create a project for your custom router: +1. Use the `cargo new` command to create a project for your custom router: ```bash - cargo scaffold https://github.com/apollographql/router.git -r apollo-router-scaffold/templates/base -t main + cargo new --bin starstuff ``` -2. The `cargo scaffold` command prompts you for some configuration settings. For the purposes of this tutorial, set your project's name to `starstuff`. +For the purposes of this tutorial, set your project's name to `starstuff`. -3. 
After your project is created, change to the `starstuff` directory: +2. After your project is created, change to the `starstuff` directory: ```bash cd starstuff ``` -The generated project has the following layout: - -```bash -starstuff -├── Cargo.toml # Dependencies are declared here -├── README.md -├── router.yaml # Router yaml config -├── src -│ ├── main.rs # Entry point -│ └── plugins # Custom plugins are located here -│ └── mod.rs -└── xtask # Build support files - ├── Cargo.toml - └── src - └── main.rs -``` - -The router uses an auto-discovery mechanism for plugins, so any plugins you add via dependency are automatically available to the router at runtime. +Write the source code for your custom binary. ## 2. Compile the router @@ -99,30 +76,9 @@ If you're using managed federation, you set the `APOLLO_KEY` and `APOLLO_GRAPH_R ## 4. Create a plugin -1. From within your project directory, scaffold a new plugin with the following command: - - ```bash - cargo router plugin create hello_world - ``` - -2. The command prompts you to choose a starting template: - - ```bash {2} - Select a plugin template: - > "basic" - "auth" - "tracing" - ``` - - The available templates are: +1. From within your project directory, implement your new plugin. - * `basic` - a barebones plugin - * `auth` - an authentication plugin for making an external call - * `tracing` - a telemetry plugin that adds a custom metric span and a log message - - For the purposes of this tutorial, choose `basic`. - -4. Add configuration options for the created plugin to your `router.yaml` file: +2. Add configuration options for the created plugin to your `router.yaml` file: ```yaml title="router.yaml" plugins: @@ -130,7 +86,7 @@ If you're using managed federation, you set the `APOLLO_KEY` and `APOLLO_GRAPH_R message: "starting my plugin" ``` -5. Run the router again: +3. 
Run the router again: ```bash cargo run -- --hot-reload --config router.yaml --supergraph supergraph-schema.graphql @@ -144,16 +100,6 @@ If you're using managed federation, you set the `APOLLO_KEY` and `APOLLO_GRAPH_R Nice work! You now have a custom router binary with an associated plugin. Next, you can extend the plugin with the functionality you need or add more plugins. -## Removing a plugin - -To remove a previously added plugin from your router project, use the following command: - -```bash -cargo router plugin remove hello_world -``` - -Note that depending on the structure of your plugin, the command might fail to remove all of its associated files. - ## Memory allocator On Linux the `apollo-router` crate sets [jemalloc](http://jemalloc.net/) @@ -172,38 +118,6 @@ If you make a library crate, also specify `default-features = false` in order to leave the choice open for the eventual executable crate. (Cargo default features are only disabled if *all* dependents specify `default-features = false`.) -## Docker - -You can use the provided [Dockerfile](https://github.com/apollographql/router/tree/main/apollo-router-scaffold/templates/base/Dockerfile) to build a release container. - -Make sure your router is configured to listen to `0.0.0.0` so you can query it from outside the container: - -```yml - supergraph: - listen: 0.0.0.0:4000 -``` - -Use your `APOLLO_KEY` and `APOLLO_GRAPH_REF` environment variables to run the router in managed federation. - - ```bash - docker build -t my_custom_router . - docker run -e APOLLO_KEY="your apollo key" -e APOLLO_GRAPH_REF="your apollo graph ref" my_custom_router - ``` - -Otherwise add a `COPY` step to the Dockerfile, and edit the entrypoint: - -```Dockerfile -# Copy configuration for docker image -COPY router.yaml /dist/config.yaml -# Copy supergraph for docker image -COPY my_supergraph.graphql /dist/supergraph.graphql - -# [...] 
and change the entrypoint - -# Default executable is the router -ENTRYPOINT ["/dist/router", "-s", "/dist/supergraph.graphql"] -``` - ## Related topics * [Optimizing Custom Router Builds](/graphos/routing/self-hosted/containerization/optimize-build) diff --git a/docs/source/routing/graphos-reporting.mdx b/docs/source/routing/graphos-reporting.mdx index 56fb8e294a..d68ee0e503 100644 --- a/docs/source/routing/graphos-reporting.mdx +++ b/docs/source/routing/graphos-reporting.mdx @@ -37,6 +37,8 @@ You can enable usage reporting via OTLP by an option that can also configure the - In router v1.x, this is controlled using the `experimental_otlp_tracing_sampler` option and is disabled by default. +- In router v2.x and later, this option is renamed to `otlp_tracing_sampler` and is enabled by default. + The supported values of the OTLP sampler option are the following: - `always_off`: send all traces via Apollo Usage Reporting protocol. Default for v1.x. @@ -49,7 +51,7 @@ The OTLP sampler is applied _after_ the common tracing sampler. In the following telemetry: apollo: # Send 0.7 OTLP / 0.3 Apollo - experimental_otlp_tracing_sampler: 0.7 + otlp_tracing_sampler: 0.7 exporters: tracing: @@ -389,4 +391,4 @@ telemetry: If you're writing a plugin, you can get the Studio Trace ID by reading the value of `apollo_operation_id` from the context. - \ No newline at end of file + diff --git a/docs/source/routing/security/request-limits.mdx b/docs/source/routing/security/request-limits.mdx index 3feb97f84f..c081f350be 100644 --- a/docs/source/routing/security/request-limits.mdx +++ b/docs/source/routing/security/request-limits.mdx @@ -286,13 +286,6 @@ If router receives more headers than the buffer size, it responds to the client Limit the maximum buffer size for the HTTP1 connection. Default is ~400kib. 
-Note for Rust Crate Users: If you are using the Router as a Rust crate, the `http1_request_max_buf_size` option requires the `hyper_header_limits` feature and also necessitates using Apollo's fork of the Hyper crate until the [changes are merged upstream](https://github.com/hyperium/hyper/pull/3523). -You can include this fork by adding the following patch to your Cargo.toml file: -```toml -[patch.crates-io] -"hyper" = { git = "https://github.com/apollographql/hyper.git", tag = "header-customizations-20241108" } -``` - ## Parser-based limits ### `parser_max_tokens` diff --git a/examples/add-timestamp-header/rhai/Cargo.toml b/examples/add-timestamp-header/rhai/Cargo.toml index 5a095bda82..2b7e308fbb 100644 --- a/examples/add-timestamp-header/rhai/Cargo.toml +++ b/examples/add-timestamp-header/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/async-auth/rust/Cargo.toml b/examples/async-auth/rust/Cargo.toml index 5749743aea..ecae1a3805 100644 --- a/examples/async-auth/rust/Cargo.toml +++ b/examples/async-auth/rust/Cargo.toml @@ -8,10 +8,10 @@ edition = "2021" anyhow = "1" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" -http = "0.2" +http = "1.2" schemars = { version = "0.8", features = ["url"] } serde = "1" serde_json = "1" serde_json_bytes.workspace = true tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/cache-control/rhai/Cargo.toml b/examples/cache-control/rhai/Cargo.toml index 70039854a6..bacd1987ab 100644 --- a/examples/cache-control/rhai/Cargo.toml +++ b/examples/cache-control/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" 
[dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/context/rust/Cargo.toml b/examples/context/rust/Cargo.toml index 75b80dcefe..79925e975e 100644 --- a/examples/context/rust/Cargo.toml +++ b/examples/context/rust/Cargo.toml @@ -8,6 +8,6 @@ edition = "2021" anyhow = "1" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" -http = "0.2" -tower = { version = "0.4", features = ["full"] } +http = "1.2" +tower = { version = "0.5", features = ["full"] } tracing = "0.1" diff --git a/examples/cookies-to-headers/rhai/Cargo.toml b/examples/cookies-to-headers/rhai/Cargo.toml index 8902489130..b37a076332 100644 --- a/examples/cookies-to-headers/rhai/Cargo.toml +++ b/examples/cookies-to-headers/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/coprocessor-subgraph/rust/Cargo.toml b/examples/coprocessor-subgraph/rust/Cargo.toml index 342a094553..0e8ddb7c6f 100644 --- a/examples/coprocessor-subgraph/rust/Cargo.toml +++ b/examples/coprocessor-subgraph/rust/Cargo.toml @@ -8,13 +8,15 @@ edition = "2021" anyhow = "1" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" +bytes = "1.6.0" futures = "0.3" -http = "0.2.8" -hyper = "0.14.23" +http = "1.2.0" +http-body-util = "0.1.2" +hyper = "1.5.1" multimap = "0.9.0" schemars = { version = "0.8", features = ["url"] } serde = "1" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features 
= ["full"] } tracing = "0.1" diff --git a/examples/coprocessor-subgraph/rust/src/echo_co_processor.rs b/examples/coprocessor-subgraph/rust/src/echo_co_processor.rs index 594c0d2990..5fa7cd3a30 100644 --- a/examples/coprocessor-subgraph/rust/src/echo_co_processor.rs +++ b/examples/coprocessor-subgraph/rust/src/echo_co_processor.rs @@ -6,8 +6,11 @@ use apollo_router::register_plugin; use apollo_router::services::router; use apollo_router::Endpoint; use apollo_router::ListenAddr; +use bytes::Bytes; use futures::future::BoxFuture; use http::StatusCode; +use http_body_util::BodyExt; +use http_body_util::Full; use multimap::MultiMap; use schemars::JsonSchema; use serde::Deserialize; @@ -83,7 +86,7 @@ impl Service for SimpleEndpoint { let fut = async move { let body = req.router_request.into_body(); - let body = hyper::body::to_bytes(body).await.unwrap(); + let body = body.collect().await.unwrap().to_bytes(); let mut json_body: serde_json::Value = serde_json::from_slice(&body).unwrap(); tracing::info!("✉️ got payload:"); @@ -145,7 +148,11 @@ impl Service for SimpleEndpoint { // return the modified payload let http_response = http::Response::builder() .status(StatusCode::OK) - .body(hyper::Body::from(serde_json::to_vec(&json_body).unwrap())) + .body( + Full::new(Bytes::from(serde_json::to_vec(&json_body).unwrap())) + .map_err(|_never| "there is an error") + .boxed_unsync(), + ) .unwrap(); let mut router_response = router::Response::from(http_response); router_response.context = req.context; diff --git a/examples/data-response-mutate/rhai/Cargo.toml b/examples/data-response-mutate/rhai/Cargo.toml index 789dc3894c..6fcebf8b6b 100644 --- a/examples/data-response-mutate/rhai/Cargo.toml +++ b/examples/data-response-mutate/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = 
["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/error-response-mutate/rhai/Cargo.toml b/examples/error-response-mutate/rhai/Cargo.toml index eb8271ca13..3a413d69b3 100644 --- a/examples/error-response-mutate/rhai/Cargo.toml +++ b/examples/error-response-mutate/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/forbid-anonymous-operations/rhai/Cargo.toml b/examples/forbid-anonymous-operations/rhai/Cargo.toml index f39649e681..1ce0fc5013 100644 --- a/examples/forbid-anonymous-operations/rhai/Cargo.toml +++ b/examples/forbid-anonymous-operations/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/forbid-anonymous-operations/rust/Cargo.toml b/examples/forbid-anonymous-operations/rust/Cargo.toml index 15c9ef4997..8ba671e150 100644 --- a/examples/forbid-anonymous-operations/rust/Cargo.toml +++ b/examples/forbid-anonymous-operations/rust/Cargo.toml @@ -8,8 +8,8 @@ edition = "2021" anyhow = "1" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } tracing = "0.1" diff --git a/examples/hello-world/rust/Cargo.toml b/examples/hello-world/rust/Cargo.toml index c631098e34..7a4e28bfba 100644 --- a/examples/hello-world/rust/Cargo.toml +++ 
b/examples/hello-world/rust/Cargo.toml @@ -12,5 +12,5 @@ schemars = { version = "0.8", features = ["url"] } serde = "1" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } tracing = "0.1" diff --git a/examples/jwt-claims/rhai/Cargo.toml b/examples/jwt-claims/rhai/Cargo.toml index 8d10a41f70..b827715542 100644 --- a/examples/jwt-claims/rhai/Cargo.toml +++ b/examples/jwt-claims/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/logging/rhai/Cargo.toml b/examples/logging/rhai/Cargo.toml index a281c2a94f..d39656eba3 100644 --- a/examples/logging/rhai/Cargo.toml +++ b/examples/logging/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1.17", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/op-name-to-header/rhai/Cargo.toml b/examples/op-name-to-header/rhai/Cargo.toml index 823a01d25d..ace78c7426 100644 --- a/examples/op-name-to-header/rhai/Cargo.toml +++ b/examples/op-name-to-header/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/status-code-propagation/rust/Cargo.toml b/examples/status-code-propagation/rust/Cargo.toml index c396718afe..10beba56c1 
100644 --- a/examples/status-code-propagation/rust/Cargo.toml +++ b/examples/status-code-propagation/rust/Cargo.toml @@ -8,9 +8,9 @@ edition = "2021" anyhow = "1" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" -http = "0.2" +http = "1.2" schemars = { version = "0.8", features = ["url"] } serde = "1" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/subgraph-request-log/rhai/Cargo.toml b/examples/subgraph-request-log/rhai/Cargo.toml index 51bb198014..d22615b6f4 100644 --- a/examples/subgraph-request-log/rhai/Cargo.toml +++ b/examples/subgraph-request-log/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/supergraph-sdl/rust/Cargo.toml b/examples/supergraph-sdl/rust/Cargo.toml index 49157427b5..4e7bfb1e94 100644 --- a/examples/supergraph-sdl/rust/Cargo.toml +++ b/examples/supergraph-sdl/rust/Cargo.toml @@ -8,5 +8,5 @@ anyhow = "1" apollo-compiler = "1.25.0" apollo-router = { path = "../../../apollo-router" } async-trait = "0.1" -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } tracing = "0.1" diff --git a/examples/surrogate-cache-key/rhai/Cargo.toml b/examples/surrogate-cache-key/rhai/Cargo.toml index f1df802b90..6b13520dc7 100644 --- a/examples/surrogate-cache-key/rhai/Cargo.toml +++ b/examples/surrogate-cache-key/rhai/Cargo.toml @@ -7,7 +7,7 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = 
["full"] } +tower = { version = "0.5", features = ["full"] } diff --git a/examples/throw-error/rhai/Cargo.toml b/examples/throw-error/rhai/Cargo.toml index 0ecd75ce83..d648aebde3 100644 --- a/examples/throw-error/rhai/Cargo.toml +++ b/examples/throw-error/rhai/Cargo.toml @@ -7,10 +7,11 @@ edition = "2021" [dependencies] anyhow = "1" apollo-router = { path = "../../../apollo-router" } -http = "0.2" +http = "1.2" +http-body-util = "0.1.2" serde_json = "1" tokio = { version = "1", features = ["full"] } -tower = { version = "0.4", features = ["full"] } +tower = { version = "0.5", features = ["full"] } [dev-dependencies] hyper = "0.14.24" diff --git a/examples/throw-error/rhai/src/main.rs b/examples/throw-error/rhai/src/main.rs index 900ae48273..e0fa3a48d1 100644 --- a/examples/throw-error/rhai/src/main.rs +++ b/examples/throw-error/rhai/src/main.rs @@ -15,6 +15,7 @@ mod tests { use apollo_router::services::supergraph; use apollo_router::Context; use http::StatusCode; + use http_body_util::BodyExt; use serde_json::json; use tower::ServiceExt; @@ -64,9 +65,13 @@ mod tests { .expect("a router response"); assert_eq!(StatusCode::UNAUTHORIZED, service_response.response.status()); - let body = hyper::body::to_bytes(service_response.response) + let body = service_response + .response + .into_body() + .collect() .await - .unwrap(); + .unwrap() + .to_bytes(); assert_eq!( expected_response, serde_json::from_slice::(&body).unwrap() diff --git a/fuzz/Cargo.toml b/fuzz/Cargo.toml index 3b73906d4a..31dee1518a 100644 --- a/fuzz/Cargo.toml +++ b/fuzz/Cargo.toml @@ -10,7 +10,7 @@ edition = "2018" cargo-fuzz = true [dependencies] -libfuzzer-sys = "0.4" +libfuzzer-sys = "=0.4.7" apollo-parser.workspace = true apollo-smith.workspace = true env_logger = "0.11.0" diff --git a/helm/chart/router/Chart.yaml b/helm/chart/router/Chart.yaml index 3302fa15fb..2ea08b0716 100644 --- a/helm/chart/router/Chart.yaml +++ b/helm/chart/router/Chart.yaml @@ -20,10 +20,10 @@ type: application # so it matches 
the shape of our release process and release automation. # By proxy of that decision, this version uses SemVer 2.0.0, though the prefix # of "v" is not included. -version: 1.59.1 +version: 2.0.0-preview.4 # This is the version number of the application being deployed. This version number should be # incremented each time you make changes to the application. Versions are not expected to # follow Semantic Versioning. They should reflect the version the application is using. # It is recommended to use it with quotes. -appVersion: "v1.59.1" +appVersion: "v2.0.0-preview.4" diff --git a/helm/chart/router/README.md b/helm/chart/router/README.md index 705a48e567..fedd4ff148 100644 --- a/helm/chart/router/README.md +++ b/helm/chart/router/README.md @@ -2,7 +2,7 @@ [router](https://github.com/apollographql/router) Rust Graph Routing runtime for Apollo Federation -![Version: 1.59.1](https://img.shields.io/badge/Version-1.59.1-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v1.59.1](https://img.shields.io/badge/AppVersion-v1.59.1-informational?style=flat-square) +![Version: 2.0.0-preview.4](https://img.shields.io/badge/Version-2.0.0--preview.4-informational?style=flat-square) ![Type: application](https://img.shields.io/badge/Type-application-informational?style=flat-square) ![AppVersion: v2.0.0-preview.4](https://img.shields.io/badge/AppVersion-v2.0.0--preview.4-informational?style=flat-square) ## Prerequisites @@ -11,7 +11,7 @@ ## Get Repo Info ```console -helm pull oci://ghcr.io/apollographql/helm-charts/router --version 1.59.1 +helm pull oci://ghcr.io/apollographql/helm-charts/router --version 2.0.0-preview.4 ``` ## Install Chart @@ -19,7 +19,7 @@ helm pull oci://ghcr.io/apollographql/helm-charts/router --version 1.59.1 **Important:** only helm3 is supported ```console -helm upgrade --install [RELEASE_NAME] oci://ghcr.io/apollographql/helm-charts/router --version 1.59.1 
--values my-values.yaml +helm upgrade --install [RELEASE_NAME] oci://ghcr.io/apollographql/helm-charts/router --version 2.0.0-preview.4 --values my-values.yaml ``` _See [configuration](#configuration) below._ diff --git a/licenses.html b/licenses.html index 208b06d19a..071b9bf4d7 100644 --- a/licenses.html +++ b/licenses.html @@ -44,17 +44,16 @@

Third Party Licenses

Overview of licenses:

@@ -66,14 +65,12 @@

Used by:

  • aws-config
  • aws-credential-types
  • -
  • aws-runtime
  • -
  • aws-sigv4
  • aws-smithy-async
  • aws-smithy-http
  • aws-smithy-json
  • aws-smithy-query
  • -
  • aws-smithy-runtime
  • aws-smithy-runtime-api
  • +
  • aws-smithy-runtime
  • aws-smithy-types
  • aws-smithy-xml
  • aws-types
  • @@ -260,10 +257,10 @@

    Apache License 2.0

    Used by:

    • miniz_oxide
    • -
    • pin-project
    • pin-project-internal
    • pin-project-lite
    • -
    • portable-atomic
    • +
    • pin-project
    • +
    • sync_wrapper
    • sync_wrapper
    • zstd-safe
    • zstd-sys
    • @@ -1292,8 +1289,8 @@

      Used by:

      Apache License 2.0

      Used by:

                                        Apache License
      @@ -1507,6 +1504,7 @@ 

      Used by:

    • encode_unicode
    • encoding_rs
    • fragile
    • +
    • iri-string
    • static_assertions
    • tinyvec
    • utf16_iter
    • @@ -1953,12 +1951,262 @@

      Used by:

      Apache License 2.0

      Used by:

      -
      -                                 Apache License
      +                
      +                                 Apache License
      +                           Version 2.0, January 2004
      +                        http://www.apache.org/licenses/
      +
      +   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      +
      +   1. Definitions.
      +
      +      "License" shall mean the terms and conditions for use, reproduction,
      +      and distribution as defined by Sections 1 through 9 of this document.
      +
      +      "Licensor" shall mean the copyright owner or entity authorized by
      +      the copyright owner that is granting the License.
      +
      +      "Legal Entity" shall mean the union of the acting entity and all
      +      other entities that control, are controlled by, or are under common
      +      control with that entity. For the purposes of this definition,
      +      "control" means (i) the power, direct or indirect, to cause the
      +      direction or management of such entity, whether by contract or
      +      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      +      outstanding shares, or (iii) beneficial ownership of such entity.
      +
      +      "You" (or "Your") shall mean an individual or Legal Entity
      +      exercising permissions granted by this License.
      +
      +      "Source" form shall mean the preferred form for making modifications,
      +      including but not limited to software source code, documentation
      +      source, and configuration files.
      +
      +      "Object" form shall mean any form resulting from mechanical
      +      transformation or translation of a Source form, including but
      +      not limited to compiled object code, generated documentation,
      +      and conversions to other media types.
      +
      +      "Work" shall mean the work of authorship, whether in Source or
      +      Object form, made available under the License, as indicated by a
      +      copyright notice that is included in or attached to the work
      +      (an example is provided in the Appendix below).
      +
      +      "Derivative Works" shall mean any work, whether in Source or Object
      +      form, that is based on (or derived from) the Work and for which the
      +      editorial revisions, annotations, elaborations, or other modifications
      +      represent, as a whole, an original work of authorship. For the purposes
      +      of this License, Derivative Works shall not include works that remain
      +      separable from, or merely link (or bind by name) to the interfaces of,
      +      the Work and Derivative Works thereof.
      +
      +      "Contribution" shall mean any work of authorship, including
      +      the original version of the Work and any modifications or additions
      +      to that Work or Derivative Works thereof, that is intentionally
      +      submitted to Licensor for inclusion in the Work by the copyright owner
      +      or by an individual or Legal Entity authorized to submit on behalf of
      +      the copyright owner. For the purposes of this definition, "submitted"
      +      means any form of electronic, verbal, or written communication sent
      +      to the Licensor or its representatives, including but not limited to
      +      communication on electronic mailing lists, source code control systems,
      +      and issue tracking systems that are managed by, or on behalf of, the
      +      Licensor for the purpose of discussing and improving the Work, but
      +      excluding communication that is conspicuously marked or otherwise
      +      designated in writing by the copyright owner as "Not a Contribution."
      +
      +      "Contributor" shall mean Licensor and any individual or Legal Entity
      +      on behalf of whom a Contribution has been received by Licensor and
      +      subsequently incorporated within the Work.
      +
      +   2. Grant of Copyright License. Subject to the terms and conditions of
      +      this License, each Contributor hereby grants to You a perpetual,
      +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      +      copyright license to reproduce, prepare Derivative Works of,
      +      publicly display, publicly perform, sublicense, and distribute the
      +      Work and such Derivative Works in Source or Object form.
      +
      +   3. Grant of Patent License. Subject to the terms and conditions of
      +      this License, each Contributor hereby grants to You a perpetual,
      +      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      +      (except as stated in this section) patent license to make, have made,
      +      use, offer to sell, sell, import, and otherwise transfer the Work,
      +      where such license applies only to those patent claims licensable
      +      by such Contributor that are necessarily infringed by their
      +      Contribution(s) alone or by combination of their Contribution(s)
      +      with the Work to which such Contribution(s) was submitted. If You
      +      institute patent litigation against any entity (including a
      +      cross-claim or counterclaim in a lawsuit) alleging that the Work
      +      or a Contribution incorporated within the Work constitutes direct
      +      or contributory patent infringement, then any patent licenses
      +      granted to You under this License for that Work shall terminate
      +      as of the date such litigation is filed.
      +
      +   4. Redistribution. You may reproduce and distribute copies of the
      +      Work or Derivative Works thereof in any medium, with or without
      +      modifications, and in Source or Object form, provided that You
      +      meet the following conditions:
      +
      +      (a) You must give any other recipients of the Work or
      +          Derivative Works a copy of this License; and
      +
      +      (b) You must cause any modified files to carry prominent notices
      +          stating that You changed the files; and
      +
      +      (c) You must retain, in the Source form of any Derivative Works
      +          that You distribute, all copyright, patent, trademark, and
      +          attribution notices from the Source form of the Work,
      +          excluding those notices that do not pertain to any part of
      +          the Derivative Works; and
      +
      +      (d) If the Work includes a "NOTICE" text file as part of its
      +          distribution, then any Derivative Works that You distribute must
      +          include a readable copy of the attribution notices contained
      +          within such NOTICE file, excluding those notices that do not
      +          pertain to any part of the Derivative Works, in at least one
      +          of the following places: within a NOTICE text file distributed
      +          as part of the Derivative Works; within the Source form or
      +          documentation, if provided along with the Derivative Works; or,
      +          within a display generated by the Derivative Works, if and
      +          wherever such third-party notices normally appear. The contents
      +          of the NOTICE file are for informational purposes only and
      +          do not modify the License. You may add Your own attribution
      +          notices within Derivative Works that You distribute, alongside
      +          or as an addendum to the NOTICE text from the Work, provided
      +          that such additional attribution notices cannot be construed
      +          as modifying the License.
      +
      +      You may add Your own copyright statement to Your modifications and
      +      may provide additional or different license terms and conditions
      +      for use, reproduction, or distribution of Your modifications, or
      +      for any such Derivative Works as a whole, provided Your use,
      +      reproduction, and distribution of the Work otherwise complies with
      +      the conditions stated in this License.
      +
      +   5. Submission of Contributions. Unless You explicitly state otherwise,
      +      any Contribution intentionally submitted for inclusion in the Work
      +      by You to the Licensor shall be under the terms and conditions of
      +      this License, without any additional terms or conditions.
      +      Notwithstanding the above, nothing herein shall supersede or modify
      +      the terms of any separate license agreement you may have executed
      +      with Licensor regarding such Contributions.
      +
      +   6. Trademarks. This License does not grant permission to use the trade
      +      names, trademarks, service marks, or product names of the Licensor,
      +      except as required for reasonable and customary use in describing the
      +      origin of the Work and reproducing the content of the NOTICE file.
      +
      +   7. Disclaimer of Warranty. Unless required by applicable law or
      +      agreed to in writing, Licensor provides the Work (and each
      +      Contributor provides its Contributions) on an "AS IS" BASIS,
      +      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      +      implied, including, without limitation, any warranties or conditions
      +      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      +      PARTICULAR PURPOSE. You are solely responsible for determining the
      +      appropriateness of using or redistributing the Work and assume any
      +      risks associated with Your exercise of permissions under this License.
      +
      +   8. Limitation of Liability. In no event and under no legal theory,
      +      whether in tort (including negligence), contract, or otherwise,
      +      unless required by applicable law (such as deliberate and grossly
      +      negligent acts) or agreed to in writing, shall any Contributor be
      +      liable to You for damages, including any direct, indirect, special,
      +      incidental, or consequential damages of any character arising as a
      +      result of this License or out of the use or inability to use the
      +      Work (including but not limited to damages for loss of goodwill,
      +      work stoppage, computer failure or malfunction, or any and all
      +      other commercial damages or losses), even if such Contributor
      +      has been advised of the possibility of such damages.
      +
      +   9. Accepting Warranty or Additional Liability. While redistributing
      +      the Work or Derivative Works thereof, You may choose to offer,
      +      and charge a fee for, acceptance of support, warranty, indemnity,
      +      or other liability obligations and/or rights consistent with this
      +      License. However, in accepting such obligations, You may act only
      +      on Your own behalf and on Your sole responsibility, not on behalf
      +      of any other Contributor, and only if You agree to indemnify,
      +      defend, and hold each Contributor harmless for any liability
      +      incurred by, or claims asserted against, such Contributor by reason
      +      of your accepting any such warranty or additional liability.
      +
      +   END OF TERMS AND CONDITIONS
      +
      +   APPENDIX: How to apply the Apache License to your work.
      +
      +      To apply the Apache License to your work, attach the following
      +      boilerplate notice, with the fields enclosed by brackets "[]"
      +      replaced with your own identifying information. (Don't include
      +      the brackets!)  The text should be enclosed in the appropriate
      +      comment syntax for the file format. We also recommend that a
      +      file or class name and description of purpose be included on the
      +      same "printed page" as the copyright notice for easier
      +      identification within third-party archives.
      +
      +   Copyright [yyyy] [name of copyright owner]
      +
      +   Licensed under the Apache License, Version 2.0 (the "License");
      +   you may not use this file except in compliance with the License.
      +   You may obtain a copy of the License at
      +
      +       http://www.apache.org/licenses/LICENSE-2.0
      +
      +   Unless required by applicable law or agreed to in writing, software
      +   distributed under the License is distributed on an "AS IS" BASIS,
      +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      +   See the License for the specific language governing permissions and
      +   limitations under the License.
      +
      + +
    • +

      Apache License 2.0

      +

      Used by:

      + +
                                       Apache License
                                  Version 2.0, January 2004
      -                        https://www.apache.org/licenses/
      +                        http://www.apache.org/licenses/
       
          TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
       
      @@ -2132,18 +2380,42 @@ 

      Used by:

      of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      - -
                                       Apache License
      -                           Version 2.0, January 2004
      -                        http://www.apache.org/licenses/
      +
      +   APPENDIX: How to apply the Apache License to your work.
      +
      +      To apply the Apache License to your work, attach the following
      +      boilerplate notice, with the fields enclosed by brackets "[]"
      +      replaced with your own identifying information. (Don't include
      +      the brackets!)  The text should be enclosed in the appropriate
      +      comment syntax for the file format. We also recommend that a
      +      file or class name and description of purpose be included on the
      +      same "printed page" as the copyright notice for easier
      +      identification within third-party archives.
      +
      +   Copyright (c) Microsoft Corporation.
      +
      +   Licensed under the Apache License, Version 2.0 (the "License");
      +   you may not use this file except in compliance with the License.
      +   You may obtain a copy of the License at
      +
      +       http://www.apache.org/licenses/LICENSE-2.0
      +
      +   Unless required by applicable law or agreed to in writing, software
      +   distributed under the License is distributed on an "AS IS" BASIS,
      +   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      +   See the License for the specific language governing permissions and
      +   limitations under the License.
      +
      +
    • +
    • +

      Apache License 2.0

      +

      Used by:

      + +
                                       Apache License
      +                           Version 2.0, January 2004
      +                        http://www.apache.org/licenses/
       
          TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
       
      @@ -2317,52 +2589,40 @@ 

      Used by:

      of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright 2020 Tomasz "Soveu" Marx + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. +
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -2552,7 +2812,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright (c) Microsoft Corporation. + Copyright 2023 The Fuchsia Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2565,13 +2825,24 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. +
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -2761,7 +3032,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2020 Tomasz "Soveu" Marx + Copyright 2023 The OpenTelemetry Authors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2774,14 +3045,16 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -2971,7 +3244,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 The Fuchsia Authors + Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -2984,24 +3257,13 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -3191,7 +3453,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 The OpenTelemetry Authors + Copyright [yyyy] [name of copyright owner] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3204,16 +3466,13 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
      +
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -3395,7 +3654,7 @@ 

      Used by:

      APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" + boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a @@ -3403,7 +3662,7 @@

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2017 Juniper Networks, Inc. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3422,7 +3681,8 @@

      Used by:

      Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -3604,7 +3864,7 @@ 

      Used by:

      APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" + boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a @@ -3612,7 +3872,7 @@

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright [yyyy] [name of copyright owner] + Copyright 2019 Michael P. Jung Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3625,13 +3885,14 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
      + +
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -3821,7 +4082,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2017 Juniper Networks, Inc. + Copyright 2019 TiKV Project Authors. Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -3840,8 +4101,13 @@

      Used by:

      Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -4031,7 +4297,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 Michael P. Jung + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4044,14 +4310,34 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -4241,7 +4527,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2019 TiKV Project Authors. + Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4254,17 +4540,14 @@

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. +
    • Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
      @@ -4443,18 +4726,8 @@ 

      Used by:

      END OF TERMS AND CONDITIONS - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} + Copyright 2019 Yoshua Wuyts + Copyright 2016-2018 Michael Tilli (Pyfisch) & `httpdate` contributors Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -4473,452 +4746,18 @@

      Used by:

      Apache License 2.0

      Used by:

                                       Apache License
                                  Version 2.0, January 2004
                               http://www.apache.org/licenses/
       
      -   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
       
      -   1. Definitions.
      +1.  Definitions.
       
      -      "License" shall mean the terms and conditions for use, reproduction,
      -      and distribution as defined by Sections 1 through 9 of this document.
      -
      -      "Licensor" shall mean the copyright owner or entity authorized by
      -      the copyright owner that is granting the License.
      -
      -      "Legal Entity" shall mean the union of the acting entity and all
      -      other entities that control, are controlled by, or are under common
      -      control with that entity. For the purposes of this definition,
      -      "control" means (i) the power, direct or indirect, to cause the
      -      direction or management of such entity, whether by contract or
      -      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -      outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -      "You" (or "Your") shall mean an individual or Legal Entity
      -      exercising permissions granted by this License.
      -
      -      "Source" form shall mean the preferred form for making modifications,
      -      including but not limited to software source code, documentation
      -      source, and configuration files.
      -
      -      "Object" form shall mean any form resulting from mechanical
      -      transformation or translation of a Source form, including but
      -      not limited to compiled object code, generated documentation,
      -      and conversions to other media types.
      -
      -      "Work" shall mean the work of authorship, whether in Source or
      -      Object form, made available under the License, as indicated by a
      -      copyright notice that is included in or attached to the work
      -      (an example is provided in the Appendix below).
      -
      -      "Derivative Works" shall mean any work, whether in Source or Object
      -      form, that is based on (or derived from) the Work and for which the
      -      editorial revisions, annotations, elaborations, or other modifications
      -      represent, as a whole, an original work of authorship. For the purposes
      -      of this License, Derivative Works shall not include works that remain
      -      separable from, or merely link (or bind by name) to the interfaces of,
      -      the Work and Derivative Works thereof.
      -
      -      "Contribution" shall mean any work of authorship, including
      -      the original version of the Work and any modifications or additions
      -      to that Work or Derivative Works thereof, that is intentionally
      -      submitted to Licensor for inclusion in the Work by the copyright owner
      -      or by an individual or Legal Entity authorized to submit on behalf of
      -      the copyright owner. For the purposes of this definition, "submitted"
      -      means any form of electronic, verbal, or written communication sent
      -      to the Licensor or its representatives, including but not limited to
      -      communication on electronic mailing lists, source code control systems,
      -      and issue tracking systems that are managed by, or on behalf of, the
      -      Licensor for the purpose of discussing and improving the Work, but
      -      excluding communication that is conspicuously marked or otherwise
      -      designated in writing by the copyright owner as "Not a Contribution."
      -
      -      "Contributor" shall mean Licensor and any individual or Legal Entity
      -      on behalf of whom a Contribution has been received by Licensor and
      -      subsequently incorporated within the Work.
      -
      -   2. Grant of Copyright License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      copyright license to reproduce, prepare Derivative Works of,
      -      publicly display, publicly perform, sublicense, and distribute the
      -      Work and such Derivative Works in Source or Object form.
      -
      -   3. Grant of Patent License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      (except as stated in this section) patent license to make, have made,
      -      use, offer to sell, sell, import, and otherwise transfer the Work,
      -      where such license applies only to those patent claims licensable
      -      by such Contributor that are necessarily infringed by their
      -      Contribution(s) alone or by combination of their Contribution(s)
      -      with the Work to which such Contribution(s) was submitted. If You
      -      institute patent litigation against any entity (including a
      -      cross-claim or counterclaim in a lawsuit) alleging that the Work
      -      or a Contribution incorporated within the Work constitutes direct
      -      or contributory patent infringement, then any patent licenses
      -      granted to You under this License for that Work shall terminate
      -      as of the date such litigation is filed.
      -
      -   4. Redistribution. You may reproduce and distribute copies of the
      -      Work or Derivative Works thereof in any medium, with or without
      -      modifications, and in Source or Object form, provided that You
      -      meet the following conditions:
      -
      -      (a) You must give any other recipients of the Work or
      -          Derivative Works a copy of this License; and
      -
      -      (b) You must cause any modified files to carry prominent notices
      -          stating that You changed the files; and
      -
      -      (c) You must retain, in the Source form of any Derivative Works
      -          that You distribute, all copyright, patent, trademark, and
      -          attribution notices from the Source form of the Work,
      -          excluding those notices that do not pertain to any part of
      -          the Derivative Works; and
      -
      -      (d) If the Work includes a "NOTICE" text file as part of its
      -          distribution, then any Derivative Works that You distribute must
      -          include a readable copy of the attribution notices contained
      -          within such NOTICE file, excluding those notices that do not
      -          pertain to any part of the Derivative Works, in at least one
      -          of the following places: within a NOTICE text file distributed
      -          as part of the Derivative Works; within the Source form or
      -          documentation, if provided along with the Derivative Works; or,
      -          within a display generated by the Derivative Works, if and
      -          wherever such third-party notices normally appear. The contents
      -          of the NOTICE file are for informational purposes only and
      -          do not modify the License. You may add Your own attribution
      -          notices within Derivative Works that You distribute, alongside
      -          or as an addendum to the NOTICE text from the Work, provided
      -          that such additional attribution notices cannot be construed
      -          as modifying the License.
      -
      -      You may add Your own copyright statement to Your modifications and
      -      may provide additional or different license terms and conditions
      -      for use, reproduction, or distribution of Your modifications, or
      -      for any such Derivative Works as a whole, provided Your use,
      -      reproduction, and distribution of the Work otherwise complies with
      -      the conditions stated in this License.
      -
      -   5. Submission of Contributions. Unless You explicitly state otherwise,
      -      any Contribution intentionally submitted for inclusion in the Work
      -      by You to the Licensor shall be under the terms and conditions of
      -      this License, without any additional terms or conditions.
      -      Notwithstanding the above, nothing herein shall supersede or modify
      -      the terms of any separate license agreement you may have executed
      -      with Licensor regarding such Contributions.
      -
      -   6. Trademarks. This License does not grant permission to use the trade
      -      names, trademarks, service marks, or product names of the Licensor,
      -      except as required for reasonable and customary use in describing the
      -      origin of the Work and reproducing the content of the NOTICE file.
      -
      -   7. Disclaimer of Warranty. Unless required by applicable law or
      -      agreed to in writing, Licensor provides the Work (and each
      -      Contributor provides its Contributions) on an "AS IS" BASIS,
      -      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -      implied, including, without limitation, any warranties or conditions
      -      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -      PARTICULAR PURPOSE. You are solely responsible for determining the
      -      appropriateness of using or redistributing the Work and assume any
      -      risks associated with Your exercise of permissions under this License.
      -
      -   8. Limitation of Liability. In no event and under no legal theory,
      -      whether in tort (including negligence), contract, or otherwise,
      -      unless required by applicable law (such as deliberate and grossly
      -      negligent acts) or agreed to in writing, shall any Contributor be
      -      liable to You for damages, including any direct, indirect, special,
      -      incidental, or consequential damages of any character arising as a
      -      result of this License or out of the use or inability to use the
      -      Work (including but not limited to damages for loss of goodwill,
      -      work stoppage, computer failure or malfunction, or any and all
      -      other commercial damages or losses), even if such Contributor
      -      has been advised of the possibility of such damages.
      -
      -   9. Accepting Warranty or Additional Liability. While redistributing
      -      the Work or Derivative Works thereof, You may choose to offer,
      -      and charge a fee for, acceptance of support, warranty, indemnity,
      -      or other liability obligations and/or rights consistent with this
      -      License. However, in accepting such obligations, You may act only
      -      on Your own behalf and on Your sole responsibility, not on behalf
      -      of any other Contributor, and only if You agree to indemnify,
      -      defend, and hold each Contributor harmless for any liability
      -      incurred by, or claims asserted against, such Contributor by reason
      -      of your accepting any such warranty or additional liability.
      -
      -   END OF TERMS AND CONDITIONS
      -
      -   APPENDIX: How to apply the Apache License to your work.
      -
      -      To apply the Apache License to your work, attach the following
      -      boilerplate notice, with the fields enclosed by brackets "{}"
      -      replaced with your own identifying information. (Don't include
      -      the brackets!)  The text should be enclosed in the appropriate
      -      comment syntax for the file format. We also recommend that a
      -      file or class name and description of purpose be included on the
      -      same "printed page" as the copyright notice for easier
      -      identification within third-party archives.
      -
      -   Copyright {yyyy} {name of copyright owner}
      -
      -   Licensed under the Apache License, Version 2.0 (the "License");
      -   you may not use this file except in compliance with the License.
      -   You may obtain a copy of the License at
      -
      -       http://www.apache.org/licenses/LICENSE-2.0
      -
      -   Unless required by applicable law or agreed to in writing, software
      -   distributed under the License is distributed on an "AS IS" BASIS,
      -   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -   See the License for the specific language governing permissions and
      -   limitations under the License.
      -
      -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      - -
                                       Apache License
      -                           Version 2.0, January 2004
      -                        http://www.apache.org/licenses/
      -
      -   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -   1. Definitions.
      -
      -      "License" shall mean the terms and conditions for use, reproduction,
      -      and distribution as defined by Sections 1 through 9 of this document.
      -
      -      "Licensor" shall mean the copyright owner or entity authorized by
      -      the copyright owner that is granting the License.
      -
      -      "Legal Entity" shall mean the union of the acting entity and all
      -      other entities that control, are controlled by, or are under common
      -      control with that entity. For the purposes of this definition,
      -      "control" means (i) the power, direct or indirect, to cause the
      -      direction or management of such entity, whether by contract or
      -      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -      outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -      "You" (or "Your") shall mean an individual or Legal Entity
      -      exercising permissions granted by this License.
      -
      -      "Source" form shall mean the preferred form for making modifications,
      -      including but not limited to software source code, documentation
      -      source, and configuration files.
      -
      -      "Object" form shall mean any form resulting from mechanical
      -      transformation or translation of a Source form, including but
      -      not limited to compiled object code, generated documentation,
      -      and conversions to other media types.
      -
      -      "Work" shall mean the work of authorship, whether in Source or
      -      Object form, made available under the License, as indicated by a
      -      copyright notice that is included in or attached to the work
      -      (an example is provided in the Appendix below).
      -
      -      "Derivative Works" shall mean any work, whether in Source or Object
      -      form, that is based on (or derived from) the Work and for which the
      -      editorial revisions, annotations, elaborations, or other modifications
      -      represent, as a whole, an original work of authorship. For the purposes
      -      of this License, Derivative Works shall not include works that remain
      -      separable from, or merely link (or bind by name) to the interfaces of,
      -      the Work and Derivative Works thereof.
      -
      -      "Contribution" shall mean any work of authorship, including
      -      the original version of the Work and any modifications or additions
      -      to that Work or Derivative Works thereof, that is intentionally
      -      submitted to Licensor for inclusion in the Work by the copyright owner
      -      or by an individual or Legal Entity authorized to submit on behalf of
      -      the copyright owner. For the purposes of this definition, "submitted"
      -      means any form of electronic, verbal, or written communication sent
      -      to the Licensor or its representatives, including but not limited to
      -      communication on electronic mailing lists, source code control systems,
      -      and issue tracking systems that are managed by, or on behalf of, the
      -      Licensor for the purpose of discussing and improving the Work, but
      -      excluding communication that is conspicuously marked or otherwise
      -      designated in writing by the copyright owner as "Not a Contribution."
      -
      -      "Contributor" shall mean Licensor and any individual or Legal Entity
      -      on behalf of whom a Contribution has been received by Licensor and
      -      subsequently incorporated within the Work.
      -
      -   2. Grant of Copyright License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      copyright license to reproduce, prepare Derivative Works of,
      -      publicly display, publicly perform, sublicense, and distribute the
      -      Work and such Derivative Works in Source or Object form.
      -
      -   3. Grant of Patent License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      (except as stated in this section) patent license to make, have made,
      -      use, offer to sell, sell, import, and otherwise transfer the Work,
      -      where such license applies only to those patent claims licensable
      -      by such Contributor that are necessarily infringed by their
      -      Contribution(s) alone or by combination of their Contribution(s)
      -      with the Work to which such Contribution(s) was submitted. If You
      -      institute patent litigation against any entity (including a
      -      cross-claim or counterclaim in a lawsuit) alleging that the Work
      -      or a Contribution incorporated within the Work constitutes direct
      -      or contributory patent infringement, then any patent licenses
      -      granted to You under this License for that Work shall terminate
      -      as of the date such litigation is filed.
      -
      -   4. Redistribution. You may reproduce and distribute copies of the
      -      Work or Derivative Works thereof in any medium, with or without
      -      modifications, and in Source or Object form, provided that You
      -      meet the following conditions:
      -
      -      (a) You must give any other recipients of the Work or
      -          Derivative Works a copy of this License; and
      -
      -      (b) You must cause any modified files to carry prominent notices
      -          stating that You changed the files; and
      -
      -      (c) You must retain, in the Source form of any Derivative Works
      -          that You distribute, all copyright, patent, trademark, and
      -          attribution notices from the Source form of the Work,
      -          excluding those notices that do not pertain to any part of
      -          the Derivative Works; and
      -
      -      (d) If the Work includes a "NOTICE" text file as part of its
      -          distribution, then any Derivative Works that You distribute must
      -          include a readable copy of the attribution notices contained
      -          within such NOTICE file, excluding those notices that do not
      -          pertain to any part of the Derivative Works, in at least one
      -          of the following places: within a NOTICE text file distributed
      -          as part of the Derivative Works; within the Source form or
      -          documentation, if provided along with the Derivative Works; or,
      -          within a display generated by the Derivative Works, if and
      -          wherever such third-party notices normally appear. The contents
      -          of the NOTICE file are for informational purposes only and
      -          do not modify the License. You may add Your own attribution
      -          notices within Derivative Works that You distribute, alongside
      -          or as an addendum to the NOTICE text from the Work, provided
      -          that such additional attribution notices cannot be construed
      -          as modifying the License.
      -
      -      You may add Your own copyright statement to Your modifications and
      -      may provide additional or different license terms and conditions
      -      for use, reproduction, or distribution of Your modifications, or
      -      for any such Derivative Works as a whole, provided Your use,
      -      reproduction, and distribution of the Work otherwise complies with
      -      the conditions stated in this License.
      -
      -   5. Submission of Contributions. Unless You explicitly state otherwise,
      -      any Contribution intentionally submitted for inclusion in the Work
      -      by You to the Licensor shall be under the terms and conditions of
      -      this License, without any additional terms or conditions.
      -      Notwithstanding the above, nothing herein shall supersede or modify
      -      the terms of any separate license agreement you may have executed
      -      with Licensor regarding such Contributions.
      -
      -   6. Trademarks. This License does not grant permission to use the trade
      -      names, trademarks, service marks, or product names of the Licensor,
      -      except as required for reasonable and customary use in describing the
      -      origin of the Work and reproducing the content of the NOTICE file.
      -
      -   7. Disclaimer of Warranty. Unless required by applicable law or
      -      agreed to in writing, Licensor provides the Work (and each
      -      Contributor provides its Contributions) on an "AS IS" BASIS,
      -      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -      implied, including, without limitation, any warranties or conditions
      -      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -      PARTICULAR PURPOSE. You are solely responsible for determining the
      -      appropriateness of using or redistributing the Work and assume any
      -      risks associated with Your exercise of permissions under this License.
      -
      -   8. Limitation of Liability. In no event and under no legal theory,
      -      whether in tort (including negligence), contract, or otherwise,
      -      unless required by applicable law (such as deliberate and grossly
      -      negligent acts) or agreed to in writing, shall any Contributor be
      -      liable to You for damages, including any direct, indirect, special,
      -      incidental, or consequential damages of any character arising as a
      -      result of this License or out of the use or inability to use the
      -      Work (including but not limited to damages for loss of goodwill,
      -      work stoppage, computer failure or malfunction, or any and all
      -      other commercial damages or losses), even if such Contributor
      -      has been advised of the possibility of such damages.
      -
      -   9. Accepting Warranty or Additional Liability. While redistributing
      -      the Work or Derivative Works thereof, You may choose to offer,
      -      and charge a fee for, acceptance of support, warranty, indemnity,
      -      or other liability obligations and/or rights consistent with this
      -      License. However, in accepting such obligations, You may act only
      -      on Your own behalf and on Your sole responsibility, not on behalf
      -      of any other Contributor, and only if You agree to indemnify,
      -      defend, and hold each Contributor harmless for any liability
      -      incurred by, or claims asserted against, such Contributor by reason
      -      of your accepting any such warranty or additional liability.
      -
      -   END OF TERMS AND CONDITIONS
      -
      -   Copyright 2019 Yoshua Wuyts
      -   Copyright 2016-2018 Michael Tilli (Pyfisch) & `httpdate` contributors
      -
      -   Licensed under the Apache License, Version 2.0 (the "License");
      -   you may not use this file except in compliance with the License.
      -   You may obtain a copy of the License at
      -
      -       http://www.apache.org/licenses/LICENSE-2.0
      -
      -   Unless required by applicable law or agreed to in writing, software
      -   distributed under the License is distributed on an "AS IS" BASIS,
      -   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -   See the License for the specific language governing permissions and
      -   limitations under the License.
      -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      - -
                                       Apache License
      -                           Version 2.0, January 2004
      -                        http://www.apache.org/licenses/
      -
      -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -1.  Definitions.
      -
      -    "License" shall mean the terms and conditions for use, reproduction,
      -    and distribution as defined by Sections 1 through 9 of this document.
      +    "License" shall mean the terms and conditions for use, reproduction,
      +    and distribution as defined by Sections 1 through 9 of this document.
       
           "Licensor" shall mean the copyright owner or entity authorized by
           the copyright owner that is granting the License.
      @@ -5321,190 +5160,6 @@ 

      Used by:

      See the License for the specific language governing permissions and limitations under the License. -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      - -
                                       Apache License
      -                           Version 2.0, January 2004
      -                        http://www.apache.org/licenses/
      -
      -   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -   1. Definitions.
      -
      -      "License" shall mean the terms and conditions for use, reproduction,
      -      and distribution as defined by Sections 1 through 9 of this document.
      -
      -      "Licensor" shall mean the copyright owner or entity authorized by
      -      the copyright owner that is granting the License.
      -
      -      "Legal Entity" shall mean the union of the acting entity and all
      -      other entities that control, are controlled by, or are under common
      -      control with that entity. For the purposes of this definition,
      -      "control" means (i) the power, direct or indirect, to cause the
      -      direction or management of such entity, whether by contract or
      -      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -      outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -      "You" (or "Your") shall mean an individual or Legal Entity
      -      exercising permissions granted by this License.
      -
      -      "Source" form shall mean the preferred form for making modifications,
      -      including but not limited to software source code, documentation
      -      source, and configuration files.
      -
      -      "Object" form shall mean any form resulting from mechanical
      -      transformation or translation of a Source form, including but
      -      not limited to compiled object code, generated documentation,
      -      and conversions to other media types.
      -
      -      "Work" shall mean the work of authorship, whether in Source or
      -      Object form, made available under the License, as indicated by a
      -      copyright notice that is included in or attached to the work
      -      (an example is provided in the Appendix below).
      -
      -      "Derivative Works" shall mean any work, whether in Source or Object
      -      form, that is based on (or derived from) the Work and for which the
      -      editorial revisions, annotations, elaborations, or other modifications
      -      represent, as a whole, an original work of authorship. For the purposes
      -      of this License, Derivative Works shall not include works that remain
      -      separable from, or merely link (or bind by name) to the interfaces of,
      -      the Work and Derivative Works thereof.
      -
      -      "Contribution" shall mean any work of authorship, including
      -      the original version of the Work and any modifications or additions
      -      to that Work or Derivative Works thereof, that is intentionally
      -      submitted to Licensor for inclusion in the Work by the copyright owner
      -      or by an individual or Legal Entity authorized to submit on behalf of
      -      the copyright owner. For the purposes of this definition, "submitted"
      -      means any form of electronic, verbal, or written communication sent
      -      to the Licensor or its representatives, including but not limited to
      -      communication on electronic mailing lists, source code control systems,
      -      and issue tracking systems that are managed by, or on behalf of, the
      -      Licensor for the purpose of discussing and improving the Work, but
      -      excluding communication that is conspicuously marked or otherwise
      -      designated in writing by the copyright owner as "Not a Contribution."
      -
      -      "Contributor" shall mean Licensor and any individual or Legal Entity
      -      on behalf of whom a Contribution has been received by Licensor and
      -      subsequently incorporated within the Work.
      -
      -   2. Grant of Copyright License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      copyright license to reproduce, prepare Derivative Works of,
      -      publicly display, publicly perform, sublicense, and distribute the
      -      Work and such Derivative Works in Source or Object form.
      -
      -   3. Grant of Patent License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      (except as stated in this section) patent license to make, have made,
      -      use, offer to sell, sell, import, and otherwise transfer the Work,
      -      where such license applies only to those patent claims licensable
      -      by such Contributor that are necessarily infringed by their
      -      Contribution(s) alone or by combination of their Contribution(s)
      -      with the Work to which such Contribution(s) was submitted. If You
      -      institute patent litigation against any entity (including a
      -      cross-claim or counterclaim in a lawsuit) alleging that the Work
      -      or a Contribution incorporated within the Work constitutes direct
      -      or contributory patent infringement, then any patent licenses
      -      granted to You under this License for that Work shall terminate
      -      as of the date such litigation is filed.
      -
      -   4. Redistribution. You may reproduce and distribute copies of the
      -      Work or Derivative Works thereof in any medium, with or without
      -      modifications, and in Source or Object form, provided that You
      -      meet the following conditions:
      -
      -      (a) You must give any other recipients of the Work or
      -          Derivative Works a copy of this License; and
      -
      -      (b) You must cause any modified files to carry prominent notices
      -          stating that You changed the files; and
      -
      -      (c) You must retain, in the Source form of any Derivative Works
      -          that You distribute, all copyright, patent, trademark, and
      -          attribution notices from the Source form of the Work,
      -          excluding those notices that do not pertain to any part of
      -          the Derivative Works; and
      -
      -      (d) If the Work includes a "NOTICE" text file as part of its
      -          distribution, then any Derivative Works that You distribute must
      -          include a readable copy of the attribution notices contained
      -          within such NOTICE file, excluding those notices that do not
      -          pertain to any part of the Derivative Works, in at least one
      -          of the following places: within a NOTICE text file distributed
      -          as part of the Derivative Works; within the Source form or
      -          documentation, if provided along with the Derivative Works; or,
      -          within a display generated by the Derivative Works, if and
      -          wherever such third-party notices normally appear. The contents
      -          of the NOTICE file are for informational purposes only and
      -          do not modify the License. You may add Your own attribution
      -          notices within Derivative Works that You distribute, alongside
      -          or as an addendum to the NOTICE text from the Work, provided
      -          that such additional attribution notices cannot be construed
      -          as modifying the License.
      -
      -      You may add Your own copyright statement to Your modifications and
      -      may provide additional or different license terms and conditions
      -      for use, reproduction, or distribution of Your modifications, or
      -      for any such Derivative Works as a whole, provided Your use,
      -      reproduction, and distribution of the Work otherwise complies with
      -      the conditions stated in this License.
      -
      -   5. Submission of Contributions. Unless You explicitly state otherwise,
      -      any Contribution intentionally submitted for inclusion in the Work
      -      by You to the Licensor shall be under the terms and conditions of
      -      this License, without any additional terms or conditions.
      -      Notwithstanding the above, nothing herein shall supersede or modify
      -      the terms of any separate license agreement you may have executed
      -      with Licensor regarding such Contributions.
      -
      -   6. Trademarks. This License does not grant permission to use the trade
      -      names, trademarks, service marks, or product names of the Licensor,
      -      except as required for reasonable and customary use in describing the
      -      origin of the Work and reproducing the content of the NOTICE file.
      -
      -   7. Disclaimer of Warranty. Unless required by applicable law or
      -      agreed to in writing, Licensor provides the Work (and each
      -      Contributor provides its Contributions) on an "AS IS" BASIS,
      -      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -      implied, including, without limitation, any warranties or conditions
      -      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -      PARTICULAR PURPOSE. You are solely responsible for determining the
      -      appropriateness of using or redistributing the Work and assume any
      -      risks associated with Your exercise of permissions under this License.
      -
      -   8. Limitation of Liability. In no event and under no legal theory,
      -      whether in tort (including negligence), contract, or otherwise,
      -      unless required by applicable law (such as deliberate and grossly
      -      negligent acts) or agreed to in writing, shall any Contributor be
      -      liable to You for damages, including any direct, indirect, special,
      -      incidental, or consequential damages of any character arising as a
      -      result of this License or out of the use or inability to use the
      -      Work (including but not limited to damages for loss of goodwill,
      -      work stoppage, computer failure or malfunction, or any and all
      -      other commercial damages or losses), even if such Contributor
      -      has been advised of the possibility of such damages.
      -
      -   9. Accepting Warranty or Additional Liability. While redistributing
      -      the Work or Derivative Works thereof, You may choose to offer,
      -      and charge a fee for, acceptance of support, warranty, indemnity,
      -      or other liability obligations and/or rights consistent with this
      -      License. However, in accepting such obligations, You may act only
      -      on Your own behalf and on Your sole responsibility, not on behalf
      -      of any other Contributor, and only if You agree to indemnify,
      -      defend, and hold each Contributor harmless for any liability
      -      incurred by, or claims asserted against, such Contributor by reason
      -      of your accepting any such warranty or additional liability.
      -
      -   END OF TERMS AND CONDITIONS
       
    • @@ -5723,217 +5378,8 @@

      Used by:

      Apache License 2.0

      Used by:

      -
                                     Apache License
      -                         Version 2.0, January 2004
      -                      http://www.apache.org/licenses/
      -
      -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -1. Definitions.
      -
      -  "License" shall mean the terms and conditions for use, reproduction,
      -  and distribution as defined by Sections 1 through 9 of this document.
      -
      -  "Licensor" shall mean the copyright owner or entity authorized by
      -  the copyright owner that is granting the License.
      -
      -  "Legal Entity" shall mean the union of the acting entity and all
      -  other entities that control, are controlled by, or are under common
      -  control with that entity. For the purposes of this definition,
      -  "control" means (i) the power, direct or indirect, to cause the
      -  direction or management of such entity, whether by contract or
      -  otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -  outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -  "You" (or "Your") shall mean an individual or Legal Entity
      -  exercising permissions granted by this License.
      -
      -  "Source" form shall mean the preferred form for making modifications,
      -  including but not limited to software source code, documentation
      -  source, and configuration files.
      -
      -  "Object" form shall mean any form resulting from mechanical
      -  transformation or translation of a Source form, including but
      -  not limited to compiled object code, generated documentation,
      -  and conversions to other media types.
      -
      -  "Work" shall mean the work of authorship, whether in Source or
      -  Object form, made available under the License, as indicated by a
      -  copyright notice that is included in or attached to the work
      -  (an example is provided in the Appendix below).
      -
      -  "Derivative Works" shall mean any work, whether in Source or Object
      -  form, that is based on (or derived from) the Work and for which the
      -  editorial revisions, annotations, elaborations, or other modifications
      -  represent, as a whole, an original work of authorship. For the purposes
      -  of this License, Derivative Works shall not include works that remain
      -  separable from, or merely link (or bind by name) to the interfaces of,
      -  the Work and Derivative Works thereof.
      -
      -  "Contribution" shall mean any work of authorship, including
      -  the original version of the Work and any modifications or additions
      -  to that Work or Derivative Works thereof, that is intentionally
      -  submitted to Licensor for inclusion in the Work by the copyright owner
      -  or by an individual or Legal Entity authorized to submit on behalf of
      -  the copyright owner. For the purposes of this definition, "submitted"
      -  means any form of electronic, verbal, or written communication sent
      -  to the Licensor or its representatives, including but not limited to
      -  communication on electronic mailing lists, source code control systems,
      -  and issue tracking systems that are managed by, or on behalf of, the
      -  Licensor for the purpose of discussing and improving the Work, but
      -  excluding communication that is conspicuously marked or otherwise
      -  designated in writing by the copyright owner as "Not a Contribution."
      -
      -  "Contributor" shall mean Licensor and any individual or Legal Entity
      -  on behalf of whom a Contribution has been received by Licensor and
      -  subsequently incorporated within the Work.
      -
      -2. Grant of Copyright License. Subject to the terms and conditions of
      -  this License, each Contributor hereby grants to You a perpetual,
      -  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -  copyright license to reproduce, prepare Derivative Works of,
      -  publicly display, publicly perform, sublicense, and distribute the
      -  Work and such Derivative Works in Source or Object form.
      -
      -3. Grant of Patent License. Subject to the terms and conditions of
      -  this License, each Contributor hereby grants to You a perpetual,
      -  worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -  (except as stated in this section) patent license to make, have made,
      -  use, offer to sell, sell, import, and otherwise transfer the Work,
      -  where such license applies only to those patent claims licensable
      -  by such Contributor that are necessarily infringed by their
      -  Contribution(s) alone or by combination of their Contribution(s)
      -  with the Work to which such Contribution(s) was submitted. If You
      -  institute patent litigation against any entity (including a
      -  cross-claim or counterclaim in a lawsuit) alleging that the Work
      -  or a Contribution incorporated within the Work constitutes direct
      -  or contributory patent infringement, then any patent licenses
      -  granted to You under this License for that Work shall terminate
      -  as of the date such litigation is filed.
      -
      -4. Redistribution. You may reproduce and distribute copies of the
      -  Work or Derivative Works thereof in any medium, with or without
      -  modifications, and in Source or Object form, provided that You
      -  meet the following conditions:
      -
      -  (a) You must give any other recipients of the Work or
      -      Derivative Works a copy of this License; and
      -
      -  (b) You must cause any modified files to carry prominent notices
      -      stating that You changed the files; and
      -
      -  (c) You must retain, in the Source form of any Derivative Works
      -      that You distribute, all copyright, patent, trademark, and
      -      attribution notices from the Source form of the Work,
      -      excluding those notices that do not pertain to any part of
      -      the Derivative Works; and
      -
      -  (d) If the Work includes a "NOTICE" text file as part of its
      -      distribution, then any Derivative Works that You distribute must
      -      include a readable copy of the attribution notices contained
      -      within such NOTICE file, excluding those notices that do not
      -      pertain to any part of the Derivative Works, in at least one
      -      of the following places: within a NOTICE text file distributed
      -      as part of the Derivative Works; within the Source form or
      -      documentation, if provided along with the Derivative Works; or,
      -      within a display generated by the Derivative Works, if and
      -      wherever such third-party notices normally appear. The contents
      -      of the NOTICE file are for informational purposes only and
      -      do not modify the License. You may add Your own attribution
      -      notices within Derivative Works that You distribute, alongside
      -      or as an addendum to the NOTICE text from the Work, provided
      -      that such additional attribution notices cannot be construed
      -      as modifying the License.
      -
      -  You may add Your own copyright statement to Your modifications and
      -  may provide additional or different license terms and conditions
      -  for use, reproduction, or distribution of Your modifications, or
      -  for any such Derivative Works as a whole, provided Your use,
      -  reproduction, and distribution of the Work otherwise complies with
      -  the conditions stated in this License.
      -
      -5. Submission of Contributions. Unless You explicitly state otherwise,
      -  any Contribution intentionally submitted for inclusion in the Work
      -  by You to the Licensor shall be under the terms and conditions of
      -  this License, without any additional terms or conditions.
      -  Notwithstanding the above, nothing herein shall supersede or modify
      -  the terms of any separate license agreement you may have executed
      -  with Licensor regarding such Contributions.
      -
      -6. Trademarks. This License does not grant permission to use the trade
      -  names, trademarks, service marks, or product names of the Licensor,
      -  except as required for reasonable and customary use in describing the
      -  origin of the Work and reproducing the content of the NOTICE file.
      -
      -7. Disclaimer of Warranty. Unless required by applicable law or
      -  agreed to in writing, Licensor provides the Work (and each
      -  Contributor provides its Contributions) on an "AS IS" BASIS,
      -  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -  implied, including, without limitation, any warranties or conditions
      -  of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -  PARTICULAR PURPOSE. You are solely responsible for determining the
      -  appropriateness of using or redistributing the Work and assume any
      -  risks associated with Your exercise of permissions under this License.
      -
      -8. Limitation of Liability. In no event and under no legal theory,
      -  whether in tort (including negligence), contract, or otherwise,
      -  unless required by applicable law (such as deliberate and grossly
      -  negligent acts) or agreed to in writing, shall any Contributor be
      -  liable to You for damages, including any direct, indirect, special,
      -  incidental, or consequential damages of any character arising as a
      -  result of this License or out of the use or inability to use the
      -  Work (including but not limited to damages for loss of goodwill,
      -  work stoppage, computer failure or malfunction, or any and all
      -  other commercial damages or losses), even if such Contributor
      -  has been advised of the possibility of such damages.
      -
      -9. Accepting Warranty or Additional Liability. While redistributing
      -  the Work or Derivative Works thereof, You may choose to offer,
      -  and charge a fee for, acceptance of support, warranty, indemnity,
      -  or other liability obligations and/or rights consistent with this
      -  License. However, in accepting such obligations, You may act only
      -  on Your own behalf and on Your sole responsibility, not on behalf
      -  of any other Contributor, and only if You agree to indemnify,
      -  defend, and hold each Contributor harmless for any liability
      -  incurred by, or claims asserted against, such Contributor by reason
      -  of your accepting any such warranty or additional liability.
      -
      -END OF TERMS AND CONDITIONS
      -
      -APPENDIX: How to apply the Apache License to your work.
      -
      -  To apply the Apache License to your work, attach the following
      -  boilerplate notice, with the fields enclosed by brackets "[]"
      -  replaced with your own identifying information. (Don't include
      -  the brackets!)  The text should be enclosed in the appropriate
      -  comment syntax for the file format. We also recommend that a
      -  file or class name and description of purpose be included on the
      -  same "printed page" as the copyright notice for easier
      -  identification within third-party archives.
      -
      -Copyright [yyyy] [name of copyright owner]
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      -
      -   http://www.apache.org/licenses/LICENSE-2.0
      -
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      -
                                    Apache License
                               Version 2.0, January 2004
      @@ -6122,7 +5568,7 @@ 

      Used by:

    • erased-serde
    • ghost
    • itoa
    • -
    • linkme
    • +
    • libc
    • paste
    • prettyplease
    • proc-macro2
    • @@ -6131,7 +5577,6 @@

      Used by:

    • ryu
    • semver
    • serde
    • -
    • serde_bytes
    • serde_derive
    • serde_derive_internals
    • serde_json
    • @@ -6139,8 +5584,8 @@

      Used by:

    • serde_qs
    • serde_urlencoded
    • syn
    • -
    • thiserror
    • thiserror-impl
    • +
    • thiserror
    • unicode-ident
    • utf-8
    • utf8parse
    • @@ -6329,7 +5774,6 @@

      Used by:

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -6756,6 +6201,7 @@ 

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -7594,6 +7040,7 @@ 

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -8220,8 +7667,8 @@ 

      Used by:

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -8640,7 +8087,7 @@ 

      Used by:

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -8830,7 +8277,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. -Copyright [2016] [rust-uname Developers] +Copyright 2023 Dirkjan Ochtman Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -8849,7 +8296,7 @@

      Used by:

      Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -9039,7 +8486,7 @@ 

      Used by:

      same "printed page" as the copyright notice for easier identification within third-party archives. -Copyright [yyyy] [name of copyright owner] +Copyright [2016] [rust-uname Developers] Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. @@ -9051,7 +8498,8 @@

      Used by:

      distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations under the License.
      +limitations under the License. +
    • Apache License 2.0

      @@ -9063,6 +8511,7 @@

      Used by:

    • arc-swap
    • async-channel
    • async-compression
    • +
    • atomic-waker
    • autocfg
    • backtrace
    • base64
    • @@ -9074,20 +8523,22 @@

      Used by:

    • bumpalo
    • bytes-utils
    • cc
    • +
    • cexpr
    • cfg-if
    • ci_info
    • cmake
    • concurrent-queue
    • -
    • const-random
    • const-random-macro
    • -
    • core-foundation
    • +
    • const-random
    • core-foundation-sys
    • +
    • core-foundation
    • +
    • core-foundation
    • countme
    • crossbeam-channel
    • crossbeam-deque
    • crossbeam-epoch
    • +
    • crossbeam-queue
    • crossbeam-utils
    • -
    • debugid
    • derivative
    • derive_arbitrary
    • displaydoc
    • @@ -9108,7 +8559,7 @@

      Used by:

    • futures-lite
    • futures-timer
    • gimli
    • -
    • git2
    • +
    • glob
    • hashbrown
    • hashbrown
    • hashbrown
    • @@ -9116,14 +8567,16 @@

      Used by:

    • heck
    • heck
    • hermit-abi
    • +
    • hermit-abi
    • httparse
    • humantime-serde
    • hyper-rustls
    • +
    • hyper-rustls
    • +
    • hyper-timeout
    • hyper-timeout
    • idna
    • idna
    • idna_adapter
    • -
    • if_chain
    • indexmap
    • indexmap
    • inventory
    • @@ -9131,12 +8584,11 @@

      Used by:

    • itertools
    • itertools
    • itertools
    • +
    • jobserver
    • js-sys
    • lazy_static
    • +
    • lazycell
    • libfuzzer-sys
    • -
    • libgit2-sys
    • -
    • libz-ng-sys
    • -
    • libz-sys
    • linux-raw-sys
    • lock_api
    • log
    • @@ -9147,19 +8599,17 @@

      Used by:

    • mockall_derive
    • multimap
    • multimap
    • -
    • multimap
    • -
    • num
    • num-bigint
    • num-complex
    • num-integer
    • num-iter
    • num-rational
    • num-traits
    • +
    • num
    • num_cpus
    • object
    • once_cell
    • openssl-probe
    • -
    • openssl-src
    • parking
    • parking_lot
    • parking_lot_core
    • @@ -9170,35 +8620,39 @@

      Used by:

    • pest_meta
    • petgraph
    • pkg-config
    • -
    • prost
    • prost-build
    • prost-derive
    • prost-derive
    • +
    • prost-derive
    • prost-types
    • prost-types
    • +
    • prost
    • +
    • prost
    • proteus
    • -
    • rayon
    • rayon-core
    • -
    • regex
    • +
    • rayon
    • regex-automata
    • regex-lite
    • regex-syntax
    • regex-syntax
    • +
    • regex
    • rowan
    • rustc-demangle
    • rustc-hash
    • -
    • rustc_version
    • -
    • rustc_version
    • +
    • rustc_version
    • rustix
    • -
    • rustls
    • rustls-native-certs
    • +
    • rustls-native-certs
    • +
    • rustls-native-certs
    • +
    • rustls-pemfile
    • rustls-pemfile
    • +
    • rustls
    • +
    • rustls
    • scopeguard
    • sct
    • -
    • security-framework
    • security-framework-sys
    • -
    • semver
    • -
    • semver-parser
    • +
    • security-framework
    • +
    • security-framework
    • serde_json_bytes
    • serde_yaml
    • shellexpand
    • @@ -9215,12 +8669,13 @@

      Used by:

    • tikv-jemallocator
    • try_match
    • tungstenite
    • +
    • tungstenite
    • typed-builder
    • -
    • typetag
    • typetag-impl
    • +
    • typetag
    • ucd-trie
    • +
    • unicase
    • unicode-bidi
    • -
    • unicode-id
    • unicode-normalization
    • unicode-width
    • url
    • @@ -9229,12 +8684,12 @@

      Used by:

    • waker-fn
    • wasi
    • wasi
    • -
    • wasm-bindgen
    • wasm-bindgen-backend
    • wasm-bindgen-futures
    • -
    • wasm-bindgen-macro
    • wasm-bindgen-macro-support
    • +
    • wasm-bindgen-macro
    • wasm-bindgen-shared
    • +
    • wasm-bindgen
    • web-sys
    • wiremock
    • xmlparser
    • @@ -10679,7 +10134,7 @@

      Used by:

      Apache License 2.0

      Used by:

      @@ -11052,240 +10507,30 @@

      Used by:

      liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - -9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - -END OF TERMS AND CONDITIONS -
      - -
    • -

      Apache License 2.0

      -

      Used by:

      - -
                                    Apache License
      -                        Version 2.0, January 2004
      -                     http://www.apache.org/licenses/
      -
      -TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -1. Definitions.
      -
      -   "License" shall mean the terms and conditions for use, reproduction,
      -   and distribution as defined by Sections 1 through 9 of this document.
      -
      -   "Licensor" shall mean the copyright owner or entity authorized by
      -   the copyright owner that is granting the License.
      -
      -   "Legal Entity" shall mean the union of the acting entity and all
      -   other entities that control, are controlled by, or are under common
      -   control with that entity. For the purposes of this definition,
      -   "control" means (i) the power, direct or indirect, to cause the
      -   direction or management of such entity, whether by contract or
      -   otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -   outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -   "You" (or "Your") shall mean an individual or Legal Entity
      -   exercising permissions granted by this License.
      -
      -   "Source" form shall mean the preferred form for making modifications,
      -   including but not limited to software source code, documentation
      -   source, and configuration files.
      -
      -   "Object" form shall mean any form resulting from mechanical
      -   transformation or translation of a Source form, including but
      -   not limited to compiled object code, generated documentation,
      -   and conversions to other media types.
      -
      -   "Work" shall mean the work of authorship, whether in Source or
      -   Object form, made available under the License, as indicated by a
      -   copyright notice that is included in or attached to the work
      -   (an example is provided in the Appendix below).
      -
      -   "Derivative Works" shall mean any work, whether in Source or Object
      -   form, that is based on (or derived from) the Work and for which the
      -   editorial revisions, annotations, elaborations, or other modifications
      -   represent, as a whole, an original work of authorship. For the purposes
      -   of this License, Derivative Works shall not include works that remain
      -   separable from, or merely link (or bind by name) to the interfaces of,
      -   the Work and Derivative Works thereof.
      -
      -   "Contribution" shall mean any work of authorship, including
      -   the original version of the Work and any modifications or additions
      -   to that Work or Derivative Works thereof, that is intentionally
      -   submitted to Licensor for inclusion in the Work by the copyright owner
      -   or by an individual or Legal Entity authorized to submit on behalf of
      -   the copyright owner. For the purposes of this definition, "submitted"
      -   means any form of electronic, verbal, or written communication sent
      -   to the Licensor or its representatives, including but not limited to
      -   communication on electronic mailing lists, source code control systems,
      -   and issue tracking systems that are managed by, or on behalf of, the
      -   Licensor for the purpose of discussing and improving the Work, but
      -   excluding communication that is conspicuously marked or otherwise
      -   designated in writing by the copyright owner as "Not a Contribution."
      -
      -   "Contributor" shall mean Licensor and any individual or Legal Entity
      -   on behalf of whom a Contribution has been received by Licensor and
      -   subsequently incorporated within the Work.
      -
      -2. Grant of Copyright License. Subject to the terms and conditions of
      -   this License, each Contributor hereby grants to You a perpetual,
      -   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -   copyright license to reproduce, prepare Derivative Works of,
      -   publicly display, publicly perform, sublicense, and distribute the
      -   Work and such Derivative Works in Source or Object form.
      -
      -3. Grant of Patent License. Subject to the terms and conditions of
      -   this License, each Contributor hereby grants to You a perpetual,
      -   worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -   (except as stated in this section) patent license to make, have made,
      -   use, offer to sell, sell, import, and otherwise transfer the Work,
      -   where such license applies only to those patent claims licensable
      -   by such Contributor that are necessarily infringed by their
      -   Contribution(s) alone or by combination of their Contribution(s)
      -   with the Work to which such Contribution(s) was submitted. If You
      -   institute patent litigation against any entity (including a
      -   cross-claim or counterclaim in a lawsuit) alleging that the Work
      -   or a Contribution incorporated within the Work constitutes direct
      -   or contributory patent infringement, then any patent licenses
      -   granted to You under this License for that Work shall terminate
      -   as of the date such litigation is filed.
      -
      -4. Redistribution. You may reproduce and distribute copies of the
      -   Work or Derivative Works thereof in any medium, with or without
      -   modifications, and in Source or Object form, provided that You
      -   meet the following conditions:
      -
      -   (a) You must give any other recipients of the Work or
      -       Derivative Works a copy of this License; and
      -
      -   (b) You must cause any modified files to carry prominent notices
      -       stating that You changed the files; and
      -
      -   (c) You must retain, in the Source form of any Derivative Works
      -       that You distribute, all copyright, patent, trademark, and
      -       attribution notices from the Source form of the Work,
      -       excluding those notices that do not pertain to any part of
      -       the Derivative Works; and
      -
      -   (d) If the Work includes a "NOTICE" text file as part of its
      -       distribution, then any Derivative Works that You distribute must
      -       include a readable copy of the attribution notices contained
      -       within such NOTICE file, excluding those notices that do not
      -       pertain to any part of the Derivative Works, in at least one
      -       of the following places: within a NOTICE text file distributed
      -       as part of the Derivative Works; within the Source form or
      -       documentation, if provided along with the Derivative Works; or,
      -       within a display generated by the Derivative Works, if and
      -       wherever such third-party notices normally appear. The contents
      -       of the NOTICE file are for informational purposes only and
      -       do not modify the License. You may add Your own attribution
      -       notices within Derivative Works that You distribute, alongside
      -       or as an addendum to the NOTICE text from the Work, provided
      -       that such additional attribution notices cannot be construed
      -       as modifying the License.
      -
      -   You may add Your own copyright statement to Your modifications and
      -   may provide additional or different license terms and conditions
      -   for use, reproduction, or distribution of Your modifications, or
      -   for any such Derivative Works as a whole, provided Your use,
      -   reproduction, and distribution of the Work otherwise complies with
      -   the conditions stated in this License.
      -
      -5. Submission of Contributions. Unless You explicitly state otherwise,
      -   any Contribution intentionally submitted for inclusion in the Work
      -   by You to the Licensor shall be under the terms and conditions of
      -   this License, without any additional terms or conditions.
      -   Notwithstanding the above, nothing herein shall supersede or modify
      -   the terms of any separate license agreement you may have executed
      -   with Licensor regarding such Contributions.
      -
      -6. Trademarks. This License does not grant permission to use the trade
      -   names, trademarks, service marks, or product names of the Licensor,
      -   except as required for reasonable and customary use in describing the
      -   origin of the Work and reproducing the content of the NOTICE file.
      -
      -7. Disclaimer of Warranty. Unless required by applicable law or
      -   agreed to in writing, Licensor provides the Work (and each
      -   Contributor provides its Contributions) on an "AS IS" BASIS,
      -   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -   implied, including, without limitation, any warranties or conditions
      -   of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -   PARTICULAR PURPOSE. You are solely responsible for determining the
      -   appropriateness of using or redistributing the Work and assume any
      -   risks associated with Your exercise of permissions under this License.
      -
      -8. Limitation of Liability. In no event and under no legal theory,
      -   whether in tort (including negligence), contract, or otherwise,
      -   unless required by applicable law (such as deliberate and grossly
      -   negligent acts) or agreed to in writing, shall any Contributor be
      -   liable to You for damages, including any direct, indirect, special,
      -   incidental, or consequential damages of any character arising as a
      -   result of this License or out of the use or inability to use the
      -   Work (including but not limited to damages for loss of goodwill,
      -   work stoppage, computer failure or malfunction, or any and all
      -   other commercial damages or losses), even if such Contributor
      -   has been advised of the possibility of such damages.
      -
      -9. Accepting Warranty or Additional Liability. While redistributing
      -   the Work or Derivative Works thereof, You may choose to offer,
      -   and charge a fee for, acceptance of support, warranty, indemnity,
      -   or other liability obligations and/or rights consistent with this
      -   License. However, in accepting such obligations, You may act only
      -   on Your own behalf and on Your sole responsibility, not on behalf
      -   of any other Contributor, and only if You agree to indemnify,
      -   defend, and hold each Contributor harmless for any liability
      -   incurred by, or claims asserted against, such Contributor by reason
      -   of your accepting any such warranty or additional liability.
      -
      -END OF TERMS AND CONDITIONS
      -
      -APPENDIX: How to apply the Apache License to your work.
      -
      -   To apply the Apache License to your work, attach the following
      -   boilerplate notice, with the fields enclosed by brackets "[]"
      -   replaced with your own identifying information. (Don't include
      -   the brackets!)  The text should be enclosed in the appropriate
      -   comment syntax for the file format. We also recommend that a
      -   file or class name and description of purpose be included on the
      -   same "printed page" as the copyright notice for easier
      -   identification within third-party archives.
      -
      -Copyright [yyyy] [name of copyright owner]
      -
      -Licensed under the Apache License, Version 2.0 (the "License");
      -you may not use this file except in compliance with the License.
      -You may obtain a copy of the License at
      +   Work (including but not limited to damages for loss of goodwill,
      +   work stoppage, computer failure or malfunction, or any and all
      +   other commercial damages or losses), even if such Contributor
      +   has been advised of the possibility of such damages.
       
      -	http://www.apache.org/licenses/LICENSE-2.0
      +9. Accepting Warranty or Additional Liability. While redistributing
      +   the Work or Derivative Works thereof, You may choose to offer,
      +   and charge a fee for, acceptance of support, warranty, indemnity,
      +   or other liability obligations and/or rights consistent with this
      +   License. However, in accepting such obligations, You may act only
      +   on Your own behalf and on Your sole responsibility, not on behalf
      +   of any other Contributor, and only if You agree to indemnify,
      +   defend, and hold each Contributor harmless for any liability
      +   incurred by, or claims asserted against, such Contributor by reason
      +   of your accepting any such warranty or additional liability.
       
      -Unless required by applicable law or agreed to in writing, software
      -distributed under the License is distributed on an "AS IS" BASIS,
      -WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -See the License for the specific language governing permissions and
      -limitations under the License.
      +END OF TERMS AND CONDITIONS +
    • Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -11487,14 +10732,14 @@ 

      Used by:

      distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and -limitations under the License. -
      +limitations under the License.
    • Apache License 2.0

      Used by:

                                    Apache License
                               Version 2.0, January 2004
      @@ -11697,216 +10942,6 @@ 

      Used by:

      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. - -
      -
    • -
    • -

      Apache License 2.0

      -

      Used by:

      - -
        Apache License
      -                           Version 2.0, January 2004
      -                        http://www.apache.org/licenses/
      -
      -   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
      -
      -   1. Definitions.
      -
      -      "License" shall mean the terms and conditions for use, reproduction,
      -      and distribution as defined by Sections 1 through 9 of this document.
      -
      -      "Licensor" shall mean the copyright owner or entity authorized by
      -      the copyright owner that is granting the License.
      -
      -      "Legal Entity" shall mean the union of the acting entity and all
      -      other entities that control, are controlled by, or are under common
      -      control with that entity. For the purposes of this definition,
      -      "control" means (i) the power, direct or indirect, to cause the
      -      direction or management of such entity, whether by contract or
      -      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      -      outstanding shares, or (iii) beneficial ownership of such entity.
      -
      -      "You" (or "Your") shall mean an individual or Legal Entity
      -      exercising permissions granted by this License.
      -
      -      "Source" form shall mean the preferred form for making modifications,
      -      including but not limited to software source code, documentation
      -      source, and configuration files.
      -
      -      "Object" form shall mean any form resulting from mechanical
      -      transformation or translation of a Source form, including but
      -      not limited to compiled object code, generated documentation,
      -      and conversions to other media types.
      -
      -      "Work" shall mean the work of authorship, whether in Source or
      -      Object form, made available under the License, as indicated by a
      -      copyright notice that is included in or attached to the work
      -      (an example is provided in the Appendix below).
      -
      -      "Derivative Works" shall mean any work, whether in Source or Object
      -      form, that is based on (or derived from) the Work and for which the
      -      editorial revisions, annotations, elaborations, or other modifications
      -      represent, as a whole, an original work of authorship. For the purposes
      -      of this License, Derivative Works shall not include works that remain
      -      separable from, or merely link (or bind by name) to the interfaces of,
      -      the Work and Derivative Works thereof.
      -
      -      "Contribution" shall mean any work of authorship, including
      -      the original version of the Work and any modifications or additions
      -      to that Work or Derivative Works thereof, that is intentionally
      -      submitted to Licensor for inclusion in the Work by the copyright owner
      -      or by an individual or Legal Entity authorized to submit on behalf of
      -      the copyright owner. For the purposes of this definition, "submitted"
      -      means any form of electronic, verbal, or written communication sent
      -      to the Licensor or its representatives, including but not limited to
      -      communication on electronic mailing lists, source code control systems,
      -      and issue tracking systems that are managed by, or on behalf of, the
      -      Licensor for the purpose of discussing and improving the Work, but
      -      excluding communication that is conspicuously marked or otherwise
      -      designated in writing by the copyright owner as "Not a Contribution."
      -
      -      "Contributor" shall mean Licensor and any individual or Legal Entity
      -      on behalf of whom a Contribution has been received by Licensor and
      -      subsequently incorporated within the Work.
      -
      -   2. Grant of Copyright License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      copyright license to reproduce, prepare Derivative Works of,
      -      publicly display, publicly perform, sublicense, and distribute the
      -      Work and such Derivative Works in Source or Object form.
      -
      -   3. Grant of Patent License. Subject to the terms and conditions of
      -      this License, each Contributor hereby grants to You a perpetual,
      -      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      -      (except as stated in this section) patent license to make, have made,
      -      use, offer to sell, sell, import, and otherwise transfer the Work,
      -      where such license applies only to those patent claims licensable
      -      by such Contributor that are necessarily infringed by their
      -      Contribution(s) alone or by combination of their Contribution(s)
      -      with the Work to which such Contribution(s) was submitted. If You
      -      institute patent litigation against any entity (including a
      -      cross-claim or counterclaim in a lawsuit) alleging that the Work
      -      or a Contribution incorporated within the Work constitutes direct
      -      or contributory patent infringement, then any patent licenses
      -      granted to You under this License for that Work shall terminate
      -      as of the date such litigation is filed.
      -
      -   4. Redistribution. You may reproduce and distribute copies of the
      -      Work or Derivative Works thereof in any medium, with or without
      -      modifications, and in Source or Object form, provided that You
      -      meet the following conditions:
      -
      -      (a) You must give any other recipients of the Work or
      -          Derivative Works a copy of this License; and
      -
      -      (b) You must cause any modified files to carry prominent notices
      -          stating that You changed the files; and
      -
      -      (c) You must retain, in the Source form of any Derivative Works
      -          that You distribute, all copyright, patent, trademark, and
      -          attribution notices from the Source form of the Work,
      -          excluding those notices that do not pertain to any part of
      -          the Derivative Works; and
      -
      -      (d) If the Work includes a "NOTICE" text file as part of its
      -          distribution, then any Derivative Works that You distribute must
      -          include a readable copy of the attribution notices contained
      -          within such NOTICE file, excluding those notices that do not
      -          pertain to any part of the Derivative Works, in at least one
      -          of the following places: within a NOTICE text file distributed
      -          as part of the Derivative Works; within the Source form or
      -          documentation, if provided along with the Derivative Works; or,
      -          within a display generated by the Derivative Works, if and
      -          wherever such third-party notices normally appear. The contents
      -          of the NOTICE file are for informational purposes only and
      -          do not modify the License. You may add Your own attribution
      -          notices within Derivative Works that You distribute, alongside
      -          or as an addendum to the NOTICE text from the Work, provided
      -          that such additional attribution notices cannot be construed
      -          as modifying the License.
      -
      -      You may add Your own copyright statement to Your modifications and
      -      may provide additional or different license terms and conditions
      -      for use, reproduction, or distribution of Your modifications, or
      -      for any such Derivative Works as a whole, provided Your use,
      -      reproduction, and distribution of the Work otherwise complies with
      -      the conditions stated in this License.
      -
      -   5. Submission of Contributions. Unless You explicitly state otherwise,
      -      any Contribution intentionally submitted for inclusion in the Work
      -      by You to the Licensor shall be under the terms and conditions of
      -      this License, without any additional terms or conditions.
      -      Notwithstanding the above, nothing herein shall supersede or modify
      -      the terms of any separate license agreement you may have executed
      -      with Licensor regarding such Contributions.
      -
      -   6. Trademarks. This License does not grant permission to use the trade
      -      names, trademarks, service marks, or product names of the Licensor,
      -      except as required for reasonable and customary use in describing the
      -      origin of the Work and reproducing the content of the NOTICE file.
      -
      -   7. Disclaimer of Warranty. Unless required by applicable law or
      -      agreed to in writing, Licensor provides the Work (and each
      -      Contributor provides its Contributions) on an "AS IS" BASIS,
      -      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      -      implied, including, without limitation, any warranties or conditions
      -      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      -      PARTICULAR PURPOSE. You are solely responsible for determining the
      -      appropriateness of using or redistributing the Work and assume any
      -      risks associated with Your exercise of permissions under this License.
      -
      -   8. Limitation of Liability. In no event and under no legal theory,
      -      whether in tort (including negligence), contract, or otherwise,
      -      unless required by applicable law (such as deliberate and grossly
      -      negligent acts) or agreed to in writing, shall any Contributor be
      -      liable to You for damages, including any direct, indirect, special,
      -      incidental, or consequential damages of any character arising as a
      -      result of this License or out of the use or inability to use the
      -      Work (including but not limited to damages for loss of goodwill,
      -      work stoppage, computer failure or malfunction, or any and all
      -      other commercial damages or losses), even if such Contributor
      -      has been advised of the possibility of such damages.
      -
      -   9. Accepting Warranty or Additional Liability. While redistributing
      -      the Work or Derivative Works thereof, You may choose to offer,
      -      and charge a fee for, acceptance of support, warranty, indemnity,
      -      or other liability obligations and/or rights consistent with this
      -      License. However, in accepting such obligations, You may act only
      -      on Your own behalf and on Your sole responsibility, not on behalf
      -      of any other Contributor, and only if You agree to indemnify,
      -      defend, and hold each Contributor harmless for any liability
      -      incurred by, or claims asserted against, such Contributor by reason
      -      of your accepting any such warranty or additional liability.
      -
      -   END OF TERMS AND CONDITIONS
      -
      -   APPENDIX: How to apply the Apache License to your work.
      -
      -      To apply the Apache License to your work, attach the following
      -      boilerplate notice, with the fields enclosed by brackets "{}"
      -      replaced with your own identifying information. (Don't include
      -      the brackets!)  The text should be enclosed in the appropriate
      -      comment syntax for the file format. We also recommend that a
      -      file or class name and description of purpose be included on the
      -      same "printed page" as the copyright notice for easier
      -      identification within third-party archives.
      -
      -   Copyright [yyyy] [name of copyright owner]
      -
      -   Licensed under the Apache License, Version 2.0 (the "License");
      -   you may not use this file except in compliance with the License.
      -   You may obtain a copy of the License at
      -
      -       http://www.apache.org/licenses/LICENSE-2.0
      -
      -   Unless required by applicable law or agreed to in writing, software
      -   distributed under the License is distributed on an "AS IS" BASIS,
      -   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
      -   See the License for the specific language governing permissions and
      -   limitations under the License.
       
    • @@ -12594,33 +11629,30 @@

      Used by:

    • async-graphql-derive
    • async-graphql-parser
    • async-graphql-value
    • +
    • aws-runtime
    • chrono
    • -
    • deno-proc-macro-rules
    • -
    • deno-proc-macro-rules-macros
    • dunce
    • graphql-introspection-query
    • graphql_client
    • graphql_client_codegen
    • graphql_query_derive
    • http-serde
    • +
    • http-serde
    • ident_case
    • -
    • libc
    • -
    • libssh2-sys
    • linkme-impl
    • -
    • md5
    • +
    • linkme
    • num-cmp
    • +
    • portable-atomic
    • prost
    • +
    • rhai
    • rhai_codegen
    • siphasher
    • -
    • system-configuration
    • system-configuration-sys
    • +
    • system-configuration
    • +
    • thiserror-impl
    • +
    • thiserror
    • thrift
    • try_match_inner
    • -
    • unic-char-property
    • -
    • unic-char-range
    • -
    • unic-common
    • -
    • unic-ucd-ident
    • -
    • unic-ucd-version
    • widestring
    • winapi-i686-pc-windows-gnu
    • winapi-x86_64-pc-windows-gnu
    • @@ -12691,7 +11723,28 @@

      Used by:

      you may not use this file except in compliance with the License. You may obtain a copy of the License at -http://www.apache.org/licenses/LICENSE-2.0 +http://www.apache.org/licenses/LICENSE-2.0 + +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. + + +
    • +

      Apache License 2.0

      +

      Used by:

      + +
      Copyright 2015 Nicholas Allegra (comex).
      +
      +Licensed under the Apache License, Version 2.0 (the "License");
      +you may not use this file except in compliance with the License.
      +You may obtain a copy of the License at
      +
      +    http://www.apache.org/licenses/LICENSE-2.0
       
       Unless required by applicable law or agreed to in writing, software
       distributed under the License is distributed on an "AS IS" BASIS,
      @@ -12767,7 +11820,6 @@ 

      Used by:

      Apache License 2.0

      Used by:

      @@ -12805,39 +11857,6 @@

      Used by:

    • zstd-sys
    MIT or Apache-2.0
    -
    - -
  • -

    BSD 2-Clause "Simplified" License

    -

    Used by:

    - -
    BSD 2-Clause License
    -
    -Copyright (c) 2023, Maarten de Vries <maarten@de-vri.es>
    -
    -Redistribution and use in source and binary forms, with or without
    -modification, are permitted provided that the following conditions are met:
    -
    -1. Redistributions of source code must retain the above copyright notice, this
    -   list of conditions and the following disclaimer.
    -
    -2. Redistributions in binary form must reproduce the above copyright notice,
    -   this list of conditions and the following disclaimer in the documentation
    -   and/or other materials provided with the distribution.
    -
    -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
    -AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
    -IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    -DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
    -FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
    -DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    -SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
    -CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
    -OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     
  • @@ -12863,6 +11882,7 @@

    BSD 3-Clause "New" or "Revised" Licens

    Used by:

    BSD 3-Clause License
     
    @@ -12899,63 +11919,61 @@ 

    Used by:

    BSD 3-Clause "New" or "Revised" License

    Used by:

    -
    Copyright (c) 2016 Dropbox, Inc.
    +                
    BSD 3-Clause License
    +
    +Copyright (c) 2013, Jyun-Yan You
     All rights reserved.
     
    -Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
    +Redistribution and use in source and binary forms, with or without
    +modification, are permitted provided that the following conditions are met:
     
    -1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
    +* Redistributions of source code must retain the above copyright notice, this
    +  list of conditions and the following disclaimer.
     
    -2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
    +* Redistributions in binary form must reproduce the above copyright notice,
    +  this list of conditions and the following disclaimer in the documentation
    +  and/or other materials provided with the distribution.
     
    -3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
    +* Neither the name of the copyright holder nor the names of its
    +  contributors may be used to endorse or promote products derived from
    +  this software without specific prior written permission.
     
    -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
    +AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
    +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
    +DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
    +FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
    +DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
    +SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
    +CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
    +OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     
  • BSD 3-Clause "New" or "Revised" License

    Used by:

    -
    Copyright (c) 2016 by Armin Ronacher.
    +                
    Copyright (c) 2016 Dropbox, Inc.
    +All rights reserved.
     
    -Some rights reserved.
    +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
     
    -Redistribution and use in source and binary forms, with or without
    -modification, are permitted provided that the following conditions are
    -met:
    +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
     
    -    * Redistributions of source code must retain the above copyright
    -      notice, this list of conditions and the following disclaimer.
    -
    -    * Redistributions in binary form must reproduce the above
    -      copyright notice, this list of conditions and the following
    -      disclaimer in the documentation and/or other materials provided
    -      with the distribution.
    -
    -    * The names of the contributors may not be used to endorse or
    -      promote products derived from this software without specific
    -      prior written permission.
    -
    -THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
    -"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
    -LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
    -A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
    -OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
    -SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
    -LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
    -DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
    -THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
    -(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
    -OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
    +
    +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
    +
    +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
     
  • @@ -13176,200 +12194,95 @@

    Used by:

    terms, with knowledge of his or her Copyright and Related Rights in the Work and the meaning and intended legal effect of CC0 on those rights. -1. Copyright and Related Rights. A Work made available under CC0 may be -protected by copyright and related or neighboring rights ("Copyright and -Related Rights"). Copyright and Related Rights include, but are not -limited to, the following: - - i. the right to reproduce, adapt, distribute, perform, display, - communicate, and translate a Work; - ii. moral rights retained by the original author(s) and/or performer(s); -iii. publicity and privacy rights pertaining to a person's image or - likeness depicted in a Work; - iv. rights protecting against unfair competition in regards to a Work, - subject to the limitations in paragraph 4(a), below; - v. rights protecting the extraction, dissemination, use and reuse of data - in a Work; - vi. database rights (such as those arising under Directive 96/9/EC of the - European Parliament and of the Council of 11 March 1996 on the legal - protection of databases, and under any national implementation - thereof, including any amended or successor version of such - directive); and -vii. other similar, equivalent or corresponding rights throughout the - world based on applicable law or treaty, and any national - implementations thereof. - -2. Waiver. 
To the greatest extent permitted by, but not in contravention -of, applicable law, Affirmer hereby overtly, fully, permanently, -irrevocably and unconditionally waives, abandons, and surrenders all of -Affirmer's Copyright and Related Rights and associated claims and causes -of action, whether now known or unknown (including existing as well as -future claims and causes of action), in the Work (i) in all territories -worldwide, (ii) for the maximum duration provided by applicable law or -treaty (including future time extensions), (iii) in any current or future -medium and for any number of copies, and (iv) for any purpose whatsoever, -including without limitation commercial, advertising or promotional -purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each -member of the public at large and to the detriment of Affirmer's heirs and -successors, fully intending that such Waiver shall not be subject to -revocation, rescission, cancellation, termination, or any other legal or -equitable action to disrupt the quiet enjoyment of the Work by the public -as contemplated by Affirmer's express Statement of Purpose. - -3. Public License Fallback. Should any part of the Waiver for any reason -be judged legally invalid or ineffective under applicable law, then the -Waiver shall be preserved to the maximum extent permitted taking into -account Affirmer's express Statement of Purpose. 
In addition, to the -extent the Waiver is so judged Affirmer hereby grants to each affected -person a royalty-free, non transferable, non sublicensable, non exclusive, -irrevocable and unconditional license to exercise Affirmer's Copyright and -Related Rights in the Work (i) in all territories worldwide, (ii) for the -maximum duration provided by applicable law or treaty (including future -time extensions), (iii) in any current or future medium and for any number -of copies, and (iv) for any purpose whatsoever, including without -limitation commercial, advertising or promotional purposes (the -"License"). The License shall be deemed effective as of the date CC0 was -applied by Affirmer to the Work. Should any part of the License for any -reason be judged legally invalid or ineffective under applicable law, such -partial invalidity or ineffectiveness shall not invalidate the remainder -of the License, and in such case Affirmer hereby affirms that he or she -will not (i) exercise any of his or her remaining Copyright and Related -Rights in the Work or (ii) assert any associated claims and causes of -action with respect to the Work, in either case contrary to Affirmer's -express Statement of Purpose. - -4. Limitations and Disclaimers. - - a. No trademark or patent rights held by Affirmer are waived, abandoned, - surrendered, licensed or otherwise affected by this document. - b. Affirmer offers the Work as-is and makes no representations or - warranties of any kind concerning the Work, express, implied, - statutory or otherwise, including without limitation warranties of - title, merchantability, fitness for a particular purpose, non - infringement, or the absence of latent or other defects, accuracy, or - the present or absence of errors, whether or not discoverable, all to - the greatest extent permissible under applicable law. - c. 
Affirmer disclaims responsibility for clearing rights of other persons - that may apply to the Work or any use thereof, including without - limitation any person's Copyright and Related Rights in the Work. - Further, Affirmer disclaims responsibility for obtaining any necessary - consents, permissions or other rights required for any use of the - Work. - d. Affirmer understands and acknowledges that Creative Commons is not a - party to this document and has no duty or obligation with respect to - this CC0 or use of the Work. - -
  • -
  • -

    Elastic License 2.0

    -

    Used by:

    - -
    Copyright 2021 Apollo Graph, Inc.
    -
    -Elastic License 2.0
    -
    -## Acceptance
    -
    -By using the software, you agree to all of the terms and conditions below.
    -
    -## Copyright License
    -
    -The licensor grants you a non-exclusive, royalty-free, worldwide,
    -non-sublicensable, non-transferable license to use, copy, distribute, make
    -available, and prepare derivative works of the software, in each case subject to
    -the limitations and conditions below.
    -
    -## Limitations
    -
    -You may not provide the software to third parties as a hosted or managed
    -service, where the service provides users with access to any substantial set of
    -the features or functionality of the software.
    -
    -You may not move, change, disable, or circumvent the license key functionality
    -in the software, and you may not remove or obscure any functionality in the
    -software that is protected by the license key.
    -
    -You may not alter, remove, or obscure any licensing, copyright, or other notices
    -of the licensor in the software. Any use of the licensor’s trademarks is subject
    -to applicable law.
    -
    -## Patents
    -
    -The licensor grants you a license, under any patent claims the licensor can
    -license, or becomes able to license, to make, have made, use, sell, offer for
    -sale, import and have imported the software, in each case subject to the
    -limitations and conditions in this license. This license does not cover any
    -patent claims that you cause to be infringed by modifications or additions to
    -the software. If you or your company make any written claim that the software
    -infringes or contributes to infringement of any patent, your patent license for
    -the software granted under these terms ends immediately. If your company makes
    -such a claim, your patent license ends immediately for work on behalf of your
    -company.
    -
    -## Notices
    -
    -You must ensure that anyone who gets a copy of any part of the software from you
    -also gets a copy of these terms.
    -
    -If you modify the software, you must include in any modified copies of the
    -software prominent notices stating that you have modified the software.
    -
    -## No Other Rights
    -
    -These terms do not imply any licenses other than those expressly granted in
    -these terms.
    -
    -## Termination
    -
    -If you use the software in violation of these terms, such use is not licensed,
    -and your licenses will automatically terminate. If the licensor provides you
    -with a notice of your violation, and you cease all violation of this license no
    -later than 30 days after you receive that notice, your licenses will be
    -reinstated retroactively. However, if you violate these terms after such
    -reinstatement, any additional violation of these terms will cause your licenses
    -to terminate automatically and permanently.
    -
    -## No Liability
    -
    -*As far as the law allows, the software comes as is, without any warranty or
    -condition, and the licensor will not be liable to you for any damages arising
    -out of these terms or the use or nature of the software, under any kind of
    -legal claim.*
    -
    -## Definitions
    -
    -The **licensor** is the entity offering these terms, and the **software** is the
    -software the licensor makes available under these terms, including any portion
    -of it.
    -
    -**you** refers to the individual or entity agreeing to these terms.
    +1. Copyright and Related Rights. A Work made available under CC0 may be
    +protected by copyright and related or neighboring rights ("Copyright and
    +Related Rights"). Copyright and Related Rights include, but are not
    +limited to, the following:
     
    -**your company** is any legal entity, sole proprietorship, or other kind of
    -organization that you work for, plus all organizations that have control over,
    -are under the control of, or are under common control with that
    -organization. **control** means ownership of substantially all the assets of an
    -entity, or the power to direct its management and policies by vote, contract, or
    -otherwise. Control can be direct or indirect.
    +  i. the right to reproduce, adapt, distribute, perform, display,
    +     communicate, and translate a Work;
    + ii. moral rights retained by the original author(s) and/or performer(s);
    +iii. publicity and privacy rights pertaining to a person's image or
    +     likeness depicted in a Work;
    + iv. rights protecting against unfair competition in regards to a Work,
    +     subject to the limitations in paragraph 4(a), below;
    +  v. rights protecting the extraction, dissemination, use and reuse of data
    +     in a Work;
    + vi. database rights (such as those arising under Directive 96/9/EC of the
    +     European Parliament and of the Council of 11 March 1996 on the legal
    +     protection of databases, and under any national implementation
    +     thereof, including any amended or successor version of such
    +     directive); and
    +vii. other similar, equivalent or corresponding rights throughout the
    +     world based on applicable law or treaty, and any national
    +     implementations thereof.
     
    -**your licenses** are all the licenses granted to you for the software under
    -these terms.
    +2. Waiver. To the greatest extent permitted by, but not in contravention
    +of, applicable law, Affirmer hereby overtly, fully, permanently,
    +irrevocably and unconditionally waives, abandons, and surrenders all of
    +Affirmer's Copyright and Related Rights and associated claims and causes
    +of action, whether now known or unknown (including existing as well as
    +future claims and causes of action), in the Work (i) in all territories
    +worldwide, (ii) for the maximum duration provided by applicable law or
    +treaty (including future time extensions), (iii) in any current or future
    +medium and for any number of copies, and (iv) for any purpose whatsoever,
    +including without limitation commercial, advertising or promotional
    +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each
    +member of the public at large and to the detriment of Affirmer's heirs and
    +successors, fully intending that such Waiver shall not be subject to
    +revocation, rescission, cancellation, termination, or any other legal or
    +equitable action to disrupt the quiet enjoyment of the Work by the public
    +as contemplated by Affirmer's express Statement of Purpose.
     
    -**use** means anything you do with the software requiring one of your licenses.
    +3. Public License Fallback. Should any part of the Waiver for any reason
    +be judged legally invalid or ineffective under applicable law, then the
    +Waiver shall be preserved to the maximum extent permitted taking into
    +account Affirmer's express Statement of Purpose. In addition, to the
    +extent the Waiver is so judged Affirmer hereby grants to each affected
    +person a royalty-free, non transferable, non sublicensable, non exclusive,
    +irrevocable and unconditional license to exercise Affirmer's Copyright and
    +Related Rights in the Work (i) in all territories worldwide, (ii) for the
    +maximum duration provided by applicable law or treaty (including future
    +time extensions), (iii) in any current or future medium and for any number
    +of copies, and (iv) for any purpose whatsoever, including without
    +limitation commercial, advertising or promotional purposes (the
    +"License"). The License shall be deemed effective as of the date CC0 was
    +applied by Affirmer to the Work. Should any part of the License for any
    +reason be judged legally invalid or ineffective under applicable law, such
    +partial invalidity or ineffectiveness shall not invalidate the remainder
    +of the License, and in such case Affirmer hereby affirms that he or she
    +will not (i) exercise any of his or her remaining Copyright and Related
    +Rights in the Work or (ii) assert any associated claims and causes of
    +action with respect to the Work, in either case contrary to Affirmer's
    +express Statement of Purpose.
     
    -**trademark** means trademarks, service marks, and similar rights.
    +4. Limitations and Disclaimers.
     
    ---------------------------------------------------------------------------------
    + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. + Further, Affirmer disclaims responsibility for obtaining any necessary + consents, permissions or other rights required for any use of the + Work. + d. Affirmer understands and acknowledges that Creative Commons is not a + party to this document and has no duty or obligation with respect to + this CC0 or use of the Work. +
  • Elastic License 2.0

    Used by:

    Copyright 2021 Apollo Graph, Inc.
     
    @@ -13465,8 +12378,7 @@ 

    Used by:

    **trademark** means trademarks, service marks, and similar rights. --------------------------------------------------------------------------------- -
    +--------------------------------------------------------------------------------
  • Elastic License 2.0

    @@ -13803,8 +12715,31 @@

    Used by:

    ISC License

    Used by:

    +
    Copyright © 2015, Simonas Kazlauskas
    +
    +Permission to use, copy, modify, and/or distribute this software for any purpose with or without
    +fee is hereby granted, provided that the above copyright notice and this permission notice appear
    +in all copies.
    +
    +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
    +SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE
    +AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
    +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
    +NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF
    +THIS SOFTWARE.
    +
    +
  • +
  • +

    ISC License

    +

    Used by:

    +
    ISC License:
     
    @@ -13820,33 +12755,27 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2014 Alex Crichton
    +                
    Copyright (c) 2009, 2010, 2013-2016 by the Brotli Authors.
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
     
  • @@ -13854,6 +12783,7 @@

    MIT License

    Used by:

    Copyright (c) 2014 Carl Lerche and other MIO contributors
     
    @@ -13908,8 +12838,8 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2014-2019 Sean McArthur
     
    @@ -13965,6 +12895,7 @@ 

    MIT License

    Used by:

    Copyright (c) 2014-2021 Sean McArthur
     
    @@ -13985,6 +12916,35 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2014-2023 Sean McArthur
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
     
  • @@ -14211,8 +13171,8 @@

    Used by:

    MIT License

    Used by:

    Copyright (c) 2016 William Orr <will@worrbase.com>
     
    @@ -14240,6 +13200,7 @@ 

    MIT License

    Used by:

    Copyright (c) 2017 Daniel Abramov
     Copyright (c) 2017 Alexey Galakhov
    @@ -14322,7 +13283,6 @@ 

    MIT License

    Used by:

    Copyright (c) 2017 Redox OS Developers
     
    @@ -14353,6 +13313,7 @@ 

    MIT License

    Used by:

    Copyright (c) 2017 h2 authors
     
    @@ -14379,6 +13340,34 @@ 

    Used by:

    OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright (c) 2017-2019 Geoffroy Couprie
    +
    +Permission is hereby granted, free of charge, to any person obtaining
    +a copy of this software and associated documentation files (the
    +"Software"), to deal in the Software without restriction, including
    +without limitation the rights to use, copy, modify, merge, publish,
    +distribute, sublicense, and/or sell copies of the Software, and to
    +permit persons to whom the Software is furnished to do so, subject to
    +the following conditions:
    +
    +The above copyright notice and this permission notice shall be
    +included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
    +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
    +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
    +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
    +LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    +OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
    +WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • @@ -14476,6 +13465,8 @@

    MIT License

    Used by:

    Copyright (c) 2019 Axum Contributors
     
    @@ -14541,8 +13532,8 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2019 Carl Lerche
     
    @@ -14670,8 +13661,8 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2019 Hyper Contributors
     
    @@ -14730,13 +13721,14 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2019 Tokio Contributors
     
    @@ -14769,9 +13761,10 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2019 Tower Contributors
     
    @@ -14870,8 +13863,9 @@ 

    Used by:

    MIT License

    Used by:

    Copyright (c) 2020 Lucio Franco
     
    @@ -14898,34 +13892,27 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright (c) 2023 Tokio Contributors
    +                
    Copyright (c) 2023 Sean McArthur
     
    -Permission is hereby granted, free of charge, to any
    -person obtaining a copy of this software and associated
    -documentation files (the "Software"), to deal in the
    -Software without restriction, including without
    -limitation the rights to use, copy, modify, merge,
    -publish, distribute, sublicense, and/or sell copies of
    -the Software, and to permit persons to whom the Software
    -is furnished to do so, subject to the following
    -conditions:
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
     
    -The above copyright notice and this permission notice
    -shall be included in all copies or substantial portions
    -of the Software.
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF
    -ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED
    -TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
    -PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
    -SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
    -CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
    -OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR
    -IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
    -DEALINGS IN THE SOFTWARE.
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
     
  • @@ -14947,16 +13934,27 @@

    Used by:

    MIT License

    Used by:

    -
    Copyright 2021 Alec Embke
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +                
    Copyright 2017-2019 Florent Fayolle, Valentin Lorentz
     
    -The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +Permission is hereby granted, free of charge, to any person obtaining a copy of
    +this software and associated documentation files (the "Software"), to deal in
    +the Software without restriction, including without limitation the rights to
    +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
    +of the Software, and to permit persons to whom the Software is furnished to do
    +so, subject to the following conditions:
     
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
     
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
     
  • @@ -14964,6 +13962,9 @@

    MIT License

    Used by:

    Copyright 2021 Axum Contributors
     
    @@ -14978,15 +13979,32 @@ 

    Used by:

    MIT License

    Used by:

    -
    Copyright 2023 Alec Embke
    +                
    Copyright 2024 Alec Embke
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    Copyright 2024 Alec Embke
     
     Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
     
     The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
     
     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
     
  • @@ -15051,11 +14069,11 @@

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2017 Evgeny Safronov
    +Copyright (c) 2017 Denis Kurilenko
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15080,13 +14098,11 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2017 Ted Driggs
    +Copyright (c) 2017 Evgeny Safronov
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15111,11 +14127,13 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2018 Canop
    +Copyright (c) 2017 Ted Driggs
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15347,11 +14365,11 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2019 brunoczim
    +Copyright (c) 2019 jD91mZM2
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15377,6 +14395,7 @@ 

    MIT License

    Used by:

    MIT License
     
    @@ -15434,11 +14453,11 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2021 the Deno authors
    +Copyright (c) 2021-2022 Joshua Barretto 
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15463,11 +14482,12 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2021-2022 Joshua Barretto 
    +Copyright (c) 2022 Ibraheem Ahmed
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15492,11 +14512,11 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2022 Ibraheem Ahmed
    +Copyright (c) 2022 Nugine
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15514,18 +14534,17 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE. -
    +SOFTWARE.
  • MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2022 Nugine
    +Copyright (c) 2022 picoHz
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15543,17 +14562,18 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. +
  • MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2022 picoHz
    +Copyright (c) 2023 4lDO2
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -15578,29 +14598,28 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
     
    -Copyright (c) 2023 4lDO2
    +Copyright (c) 2024 Ben Newman <shape@eloper.dev> and Apollo Graph, Inc.
     
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    +Permission is hereby granted, free of charge, to any person obtaining a copy of
    +this software and associated documentation files (the "Software"), to deal in
    +the Software without restriction, including without limitation the rights to
    +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
    +the Software, and to permit persons to whom the Software is furnished to do so,
    +subject to the following conditions:
     
     The above copyright notice and this permission notice shall be included in all
     copies or substantial portions of the Software.
     
     THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    -SOFTWARE.
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
    +FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
    +COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
    +IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
    +CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
     
  • @@ -15609,21 +14628,8 @@

    Used by:

    MIT License
    @@ -15641,6 +14647,8 @@ 

    Used by:

    MIT License

    Used by:

    MIT License
    @@ -15749,6 +14757,35 @@ 

    Used by:

    LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    MIT License
    +
    +Copyright (c) 2020 Nicholas Fleck
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in all
    +copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
    +SOFTWARE.
    +
  • MIT License

    @@ -15804,7 +14841,6 @@

    MIT License

    Used by:

    Permission is hereby granted, free of charge, to any person obtaining
     a copy of this software and associated documentation files (the
    @@ -15891,6 +14927,7 @@ 

    MIT License

    Used by:

    The MIT License (MIT)
     
    @@ -15949,8 +14986,6 @@ 

    Used by:

    The MIT License (MIT)
     
    @@ -16069,6 +15104,36 @@ 

    Used by:

    OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +
    +
  • +
  • +

    MIT License

    +

    Used by:

    + +
    The MIT License (MIT)
    +
    +Copyright (c) 2015 Markus Westerlind
    +
    +Permission is hereby granted, free of charge, to any person obtaining a copy
    +of this software and associated documentation files (the "Software"), to deal
    +in the Software without restriction, including without limitation the rights
    +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    +copies of the Software, and to permit persons to whom the Software is
    +furnished to do so, subject to the following conditions:
    +
    +The above copyright notice and this permission notice shall be included in
    +all copies or substantial portions of the Software.
    +
    +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    +THE SOFTWARE.
    +
     
  • @@ -16284,8 +15349,6 @@

    MIT License

    Used by:

    The MIT License (MIT)
     
    @@ -16345,9 +15408,9 @@ 

    Used by:

    MIT License

    Used by:

    The MIT License (MIT)
     
    @@ -16372,34 +15435,6 @@ 

    Used by:

    SOFTWARE.
    -
  • -
  • -

    MIT License

    -

    Used by:

    - -
    The MIT License (MIT)
    -
    -Copyright (c) 2020 Benjamin Coenen
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy
    -of this software and associated documentation files (the "Software"), to deal
    -in the Software without restriction, including without limitation the rights
    -to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
    -copies of the Software, and to permit persons to whom the Software is
    -furnished to do so, subject to the following conditions:
    -
    -The above copyright notice and this permission notice shall be included in
    -all copies or substantial portions of the Software.
    -
    -THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
    -IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
    -FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
    -AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
    -LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
    -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
    -THE SOFTWARE.
  • MIT License

    @@ -16421,11 +15456,11 @@

    Used by:

    MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2015 Bartłomiej Kamiński
    +Copyright (c) 2015 Austin Bonander
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -16443,17 +15478,19 @@ 

    Used by:

    AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE -SOFTWARE.
    +SOFTWARE. + +
  • MIT License

    Used by:

    The MIT License (MIT)
     
    -Copyright (c) 2015 Markus Westerlind
    +Copyright (c) 2015 Bartłomiej Kamiński
     
     Permission is hereby granted, free of charge, to any person obtaining a copy
     of this software and associated documentation files (the "Software"), to deal
    @@ -16462,18 +15499,16 @@ 

    Used by:

    copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -The above copyright notice and this permission notice shall be included in -all copies or substantial portions of the Software. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, -OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN -THE SOFTWARE. - -
    +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE.
  • MIT License

    @@ -17272,6 +16307,7 @@

    Used by:

    Mozilla Public License Version 2.0
     ==================================
    @@ -17736,6 +16772,62 @@ 

    Used by:

    * Hudson (tjh@cryptsoft.com). * */
    +
  • +
  • +

    OpenSSL License

    +

    Used by:

    + +
    OpenSSL License
    +
    +Copyright (c) 1998-2008 The OpenSSL Project. All rights reserved.
    +
    +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
    +
    +1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
    +
    +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
    +
    +3. All advertising materials mentioning features or use of this software must display the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit. (http://www.openssl.org/)"
    +
    +4. The names "OpenSSL Toolkit" and "OpenSSL Project" must not be used to endorse or promote products derived from this software without prior written permission. For written permission, please contact openssl-core@openssl.org.
    +
    +5. Products derived from this software may not be called "OpenSSL" nor may "OpenSSL" appear in their names without prior written permission of the OpenSSL Project.
    +
    +6. Redistributions of any form whatsoever must retain the following acknowledgment: "This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/)"
    +
    +THIS SOFTWARE IS PROVIDED BY THE OpenSSL PROJECT ``AS IS'' AND ANY EXPRESSED OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE OpenSSL PROJECT OR ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    +
    +This product includes cryptographic software written by Eric Young (eay@cryptsoft.com). This product includes software written by Tim Hudson (tjh@cryptsoft.com).
    +
    +
    +Original SSLeay License
    +
    +Copyright (C) 1995-1998 Eric Young (eay@cryptsoft.com) All rights reserved.
    +
    +This package is an SSL implementation written by Eric Young (eay@cryptsoft.com). The implementation was written so as to conform with Netscapes SSL.
    +
    +This library is free for commercial and non-commercial use as long as the following conditions are aheared to. The following conditions apply to all code found in this distribution, be it the RC4, RSA, lhash, DES, etc., code; not just the SSL code. The SSL documentation included with this distribution is covered by the same copyright terms except that the holder is Tim Hudson (tjh@cryptsoft.com).
    +
    +Copyright remains Eric Young's, and as such any Copyright notices in the code are not to be removed. If this package is used in a product, Eric Young should be given attribution as the author of the parts of the library used. This can be in the form of a textual message at program startup or in documentation (online or textual) provided with the package.
    +
    +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
    +
    +1. Redistributions of source code must retain the copyright notice, this list of conditions and the following disclaimer.
    +
    +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
    +
    +3. All advertising materials mentioning features or use of this software must display the following acknowledgement:
    +"This product includes cryptographic software written by Eric Young (eay@cryptsoft.com)"
    +The word 'cryptographic' can be left out if the rouines from the library being used are not cryptographic related :-).
    +
    +4. If you include any Windows specific code (or a derivative thereof) from the apps directory (application code) you must include an acknowledgement: "This product includes software written by Tim Hudson (tjh@cryptsoft.com)"
    +
    +THIS SOFTWARE IS PROVIDED BY ERIC YOUNG ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
    +
    +The licence and distribution terms for any publically available version or derivative of this code cannot be changed. i.e. this code cannot simply be copied and put under another distribution licence [including the GNU Public Licence.]
    +
  • Unicode License v3

    @@ -17753,13 +16845,14 @@

    Used by:

  • icu_provider_macros
  • litemap
  • tinystr
  • +
  • unicode-ident
  • writeable
  • -
  • yoke
  • yoke-derive
  • -
  • zerofrom
  • +
  • yoke
  • zerofrom-derive
  • -
  • zerovec
  • +
  • zerofrom
  • zerovec-derive
  • +
  • zerovec
UNICODE LICENSE V3
 
@@ -17800,36 +16893,6 @@ 

Used by:

not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder. -
- -
  • -

    Unicode License Agreement - Data Files and Software (2016)

    -

    Used by:

    - -
    UNICODE, INC. LICENSE AGREEMENT - DATA FILES AND SOFTWARE
    -
    -Unicode Data Files include all data files under the directories http://www.unicode.org/Public/, http://www.unicode.org/reports/, http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and http://www.unicode.org/utility/trac/browser/.
    -
    -Unicode Data Files do not include PDF online code charts under the directory http://www.unicode.org/Public/.
    -
    -Software includes any source code published in the Unicode Standard or under the directories http://www.unicode.org/Public/, http://www.unicode.org/reports/, http://www.unicode.org/cldr/data/, http://source.icu-project.org/repos/icu/, and http://www.unicode.org/utility/trac/browser/.
    -
    -NOTICE TO USER: Carefully read the following legal agreement. BY DOWNLOADING, INSTALLING, COPYING OR OTHERWISE USING UNICODE INC.'S DATA FILES ("DATA FILES"), AND/OR SOFTWARE ("SOFTWARE"), YOU UNEQUIVOCALLY ACCEPT, AND AGREE TO BE BOUND BY, ALL OF THE TERMS AND CONDITIONS OF THIS AGREEMENT. IF YOU DO NOT AGREE, DO NOT DOWNLOAD, INSTALL, COPY, DISTRIBUTE OR USE THE DATA FILES OR SOFTWARE.
    -
    -COPYRIGHT AND PERMISSION NOTICE
    -
    -Copyright © 1991-2016 Unicode, Inc. All rights reserved. Distributed under the Terms of Use in http://www.unicode.org/copyright.html.
    -
    -Permission is hereby granted, free of charge, to any person obtaining a copy of the Unicode data files and any associated documentation (the "Data Files") or Unicode software and any associated documentation (the "Software") to deal in the Data Files or Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, and/or sell copies of the Data Files or Software, and to permit persons to whom the Data Files or Software are furnished to do so, provided that either
    -
    -     (a) this copyright and permission notice appear with all copies of the Data Files or Software, or
    -     (b) this copyright and permission notice appear in associated Documentation.
    -
    -THE DATA FILES AND SOFTWARE ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THE DATA FILES OR SOFTWARE.
    -
    -Except as contained in this notice, the name of a copyright holder shall not be used in advertising or otherwise to promote the sale, use or other dealings in these Data Files or Software without prior written authorization of the copyright holder.
     
  • diff --git a/scripts/install.sh b/scripts/install.sh index 560319b7db..19d5a9b237 100755 --- a/scripts/install.sh +++ b/scripts/install.sh @@ -11,7 +11,7 @@ BINARY_DOWNLOAD_PREFIX="https://github.com/apollographql/router/releases/downloa # Router version defined in apollo-router's Cargo.toml # Note: Change this line manually during the release steps. -PACKAGE_VERSION="v1.59.1" +PACKAGE_VERSION="v2.0.0-preview.4" download_binary() { downloader --check @@ -200,4 +200,3 @@ downloader() { } download_binary "$@" || exit 1 - diff --git a/xtask/src/commands/release.rs b/xtask/src/commands/release.rs index db80637fc5..fd6007d82f 100644 --- a/xtask/src/commands/release.rs +++ b/xtask/src/commands/release.rs @@ -173,7 +173,8 @@ impl Prepare { Ok(()) } - /// Update the `apollo-router` version in the `dependencies` sections of the `Cargo.toml` files in `apollo-router-scaffold/templates/**`. + /// Update the `apollo-router` version in the `dependencies` sections of the `Cargo.toml` + /// files. fn update_cargo_tomls(&self, version: &Version) -> Result { println!("updating Cargo.toml files"); fn bump(component: &str) -> Result<()> { @@ -238,24 +239,12 @@ impl Prepare { .to_string(); if let Version::Nightly = version { - println!("Not changing `apollo-router-scaffold` or `apollo-router-benchmarks` because of nightly build mode."); + println!("Not changing `apollo-router-benchmarks` because of nightly build mode."); } else { - let packages = vec!["apollo-router-scaffold", "apollo-router-benchmarks"]; + let packages = vec!["apollo-router-benchmarks"]; for package in packages { cargo!(["set-version", &resolved_version, "--package", package]) } - replace_in_file!( - "./apollo-router-scaffold/templates/base/Cargo.template.toml", - "^apollo-router\\s*=\\s*\"[^\"]+\"", - format!("apollo-router = \"{resolved_version}\"") - ); - replace_in_file!( - "./apollo-router-scaffold/templates/base/xtask/Cargo.template.toml", - r#"^apollo-router-scaffold = 
\{\s*git\s*=\s*"https://github.com/apollographql/router.git",\s*tag\s*=\s*"v[^"]+"\s*\}$"#, - format!( - r#"apollo-router-scaffold = {{ git = "https://github.com/apollographql/router.git", tag = "v{resolved_version}" }}"# - ) - ); } Ok(resolved_version)