diff --git a/.github/actions/pr_notifier/requirements.in b/.github/actions/pr_notifier/requirements.in new file mode 100644 index 0000000000000..b27ccacba25ae --- /dev/null +++ b/.github/actions/pr_notifier/requirements.in @@ -0,0 +1,2 @@ +pygithub +slack_sdk diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 36d284ee60378..4aeb246c9db95 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -7,12 +7,12 @@ updates: interval: "daily" - package-ecosystem: "pip" - directory: "/test/extensions/filters/network/thrift_proxy" + directory: "/configs" schedule: interval: "daily" - package-ecosystem: "pip" - directory: "/contrib/kafka/filters/network/source" + directory: "/test/extensions/filters/network/thrift_proxy" schedule: interval: "daily" @@ -27,17 +27,7 @@ updates: interval: "daily" - package-ecosystem: "pip" - directory: "/tools/docs" - schedule: - interval: "daily" - -- package-ecosystem: "pip" - directory: "/tools/git" - schedule: - interval: "daily" - -- package-ecosystem: "pip" - directory: "/tools/config_validation" + directory: "/tools/code_format" schedule: interval: "daily" @@ -46,28 +36,13 @@ updates: schedule: interval: "daily" -- package-ecosystem: "pip" - directory: "/tools/deprecate_version" - schedule: - interval: "daily" - -- package-ecosystem: "pip" - directory: "/tools/distribution" - schedule: - interval: "daily" - -- package-ecosystem: "pip" - directory: "/tools/protodoc" - schedule: - interval: "daily" - - package-ecosystem: "pip" directory: "/tools/deprecate_features" schedule: interval: "daily" - package-ecosystem: "pip" - directory: "/tools/code_format" + directory: "/tools/deprecate_version" schedule: interval: "daily" @@ -76,16 +51,6 @@ updates: schedule: interval: "daily" -- package-ecosystem: "pip" - directory: "/configs" - schedule: - interval: "daily" - -- package-ecosystem: "pip" - directory: "/tools/testing" - schedule: - interval: "daily" - - package-ecosystem: "docker" directory: "/ci" schedule: diff 
--git a/WORKSPACE b/WORKSPACE index a96cba5013021..65b97f124d45d 100644 --- a/WORKSPACE +++ b/WORKSPACE @@ -16,6 +16,10 @@ load("//bazel:repositories_extra.bzl", "envoy_dependencies_extra") envoy_dependencies_extra() +load("@base_pip3//:requirements.bzl", "install_deps") + +install_deps() + load("//bazel:dependency_imports.bzl", "envoy_dependency_imports") envoy_dependency_imports() diff --git a/api/BUILD b/api/BUILD index 5bbde32946b63..93f9184a2b400 100644 --- a/api/BUILD +++ b/api/BUILD @@ -60,6 +60,7 @@ proto_library( "//contrib/envoy/extensions/filters/http/squash/v3:pkg", "//contrib/envoy/extensions/filters/http/sxg/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/kafka_broker/v3:pkg", + "//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/mysql_proxy/v3:pkg", "//contrib/envoy/extensions/filters/network/postgres_proxy/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/rocketmq_proxy/v3:pkg", diff --git a/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD b/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD new file mode 100644 index 0000000000000..ee92fb652582e --- /dev/null +++ b/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD @@ -0,0 +1,9 @@ +# DO NOT EDIT. This file is generated by tools/proto_format/proto_sync.py. 
+ +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = ["@com_github_cncf_udpa//udpa/annotations:pkg"], +) diff --git a/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto b/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto new file mode 100644 index 0000000000000..03a6522852ab5 --- /dev/null +++ b/api/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto @@ -0,0 +1,58 @@ +syntax = "proto3"; + +package envoy.extensions.filters.network.kafka_mesh.v3alpha; + +import "udpa/annotations/status.proto"; +import "validate/validate.proto"; + +option java_package = "io.envoyproxy.envoy.extensions.filters.network.kafka_mesh.v3alpha"; +option java_outer_classname = "KafkaMeshProto"; +option java_multiple_files = true; +option (udpa.annotations.file_status).work_in_progress = true; +option (udpa.annotations.file_status).package_version_status = ACTIVE; + +// [#protodoc-title: Kafka Mesh] +// Kafka Mesh :ref:`configuration overview `. +// [#extension: envoy.filters.network.kafka_mesh] + +message KafkaMesh { + // Envoy's host that's advertised to clients. + // Has the same meaning as corresponding Kafka broker properties. + // Usually equal to filter chain's listener config, but needs to be reachable by clients + // (so 0.0.0.0 will not work). + string advertised_host = 1 [(validate.rules).string = {min_len: 1}]; + + // Envoy's port that's advertised to clients. + int32 advertised_port = 2 [(validate.rules).int32 = {gt: 0}]; + + // Upstream clusters this filter will connect to. + repeated KafkaClusterDefinition upstream_clusters = 3; + + // Rules that will decide which cluster gets which request. + repeated ForwardingRule forwarding_rules = 4; +} + +message KafkaClusterDefinition { + // Cluster name. + string cluster_name = 1 [(validate.rules).string = {min_len: 1}]; + + // Kafka cluster address. 
+ string bootstrap_servers = 2 [(validate.rules).string = {min_len: 1}]; + + // Default number of partitions present in this cluster. + // This is especially important for clients that do not specify partition in their payloads and depend on this value for hashing. + int32 partition_count = 3 [(validate.rules).int32 = {gt: 0}]; + + // Custom configuration passed to Kafka producer. + map producer_config = 4; +} + +message ForwardingRule { + // Cluster name. + string target_cluster = 1; + + oneof trigger { + // Intended place for future types of forwarding rules. + string topic_prefix = 2; + } +} diff --git a/api/versioning/BUILD b/api/versioning/BUILD index 52cb8c09eaf81..61af4c4764680 100644 --- a/api/versioning/BUILD +++ b/api/versioning/BUILD @@ -12,6 +12,7 @@ proto_library( "//contrib/envoy/extensions/filters/http/squash/v3:pkg", "//contrib/envoy/extensions/filters/http/sxg/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/kafka_broker/v3:pkg", + "//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/mysql_proxy/v3:pkg", "//contrib/envoy/extensions/filters/network/postgres_proxy/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/rocketmq_proxy/v3:pkg", diff --git a/bazel/BUILD b/bazel/BUILD index 016482a577f3e..303ab531bead3 100644 --- a/bazel/BUILD +++ b/bazel/BUILD @@ -586,3 +586,8 @@ alias( name = "remote_jdk11", actual = "@bazel_tools//tools/jdk:remote_jdk11", ) + +alias( + name = "windows", + actual = "@bazel_tools//src/conditions:windows", +) diff --git a/bazel/repositories.bzl b/bazel/repositories.bzl index c53d62da1bbb6..ae04a8918212c 100644 --- a/bazel/repositories.bzl +++ b/bazel/repositories.bzl @@ -637,14 +637,8 @@ def _com_google_absl(): ) def _com_google_protobuf(): - # TODO(phlax): remove patch - # patch is applied to update setuptools to version (0.5.4), - # and can be removed once this has been updated in rules_python - # see 
https://github.com/envoyproxy/envoy/pull/15236#issuecomment-788650946 for discussion external_http_archive( name = "rules_python", - patches = ["@envoy//bazel:rules_python.patch"], - patch_args = ["-p1"], ) external_http_archive( diff --git a/bazel/repositories_extra.bzl b/bazel/repositories_extra.bzl index 6b9c483a6ea72..686e90998f16d 100644 --- a/bazel/repositories_extra.bzl +++ b/bazel/repositories_extra.bzl @@ -1,138 +1,28 @@ -load("@rules_python//python:pip.bzl", "pip_install") +load("@rules_python//python:pip.bzl", "pip_install", "pip_parse") load("@proxy_wasm_cpp_host//bazel/cargo:crates.bzl", "proxy_wasm_cpp_host_fetch_remote_crates") load("//bazel/external/cargo:crates.bzl", "raze_fetch_remote_crates") # Python dependencies. def _python_deps(): - pip_install( + pip_parse( name = "base_pip3", - requirements = "@envoy//tools/base:requirements.txt", + requirements_lock = "@envoy//tools/base:requirements.txt", extra_pip_args = ["--require-hashes"], ) - pip_install( - name = "config_validation_pip3", - requirements = "@envoy//tools/config_validation:requirements.txt", - extra_pip_args = ["--require-hashes"], - - # project_name = "PyYAML", - # project_url = "https://github.com/yaml/pyyaml", - # version = "5.4.1", - # release_date = "2021-01-20" - # use_category = ["devtools"], - # cpe = "cpe:2.3:a:pyyaml:pyyaml:*", - ) pip_install( name = "configs_pip3", requirements = "@envoy//configs:requirements.txt", extra_pip_args = ["--require-hashes"], - - # project_name = "Jinja", - # project_url = "http://palletsprojects.com/p/jinja", - # version = "2.11.2", - # release_date = "2020-04-13" - # use_category = ["test"], - # cpe = "cpe:2.3:a:palletsprojects:jinja:*", - - # project_name = "MarkupSafe", - # project_url = "https://markupsafe.palletsprojects.com/en/1.1.x/", - # version = "1.1.1", - # release_date = "2019-02-23" - # use_category = ["test"], - ) - pip_install( - name = "docs_pip3", - requirements = "@envoy//tools/docs:requirements.txt", - extra_pip_args = 
["--require-hashes"], - ) - pip_install( - name = "deps_pip3", - requirements = "@envoy//tools/dependency:requirements.txt", - extra_pip_args = ["--require-hashes"], - ) - pip_install( - name = "distribution_pip3", - requirements = "@envoy//tools/distribution:requirements.txt", - extra_pip_args = ["--require-hashes"], - ) - pip_install( - name = "git_pip3", - requirements = "@envoy//tools/git:requirements.txt", - extra_pip_args = ["--require-hashes"], - ) - pip_install( - name = "kafka_pip3", - requirements = "@envoy//contrib/kafka/filters/network/source:requirements.txt", - extra_pip_args = ["--require-hashes"], - - # project_name = "Jinja", - # project_url = "http://palletsprojects.com/p/jinja", - # version = "2.11.2", - # release_date = "2020-04-13" - # use_category = ["test"], - # cpe = "cpe:2.3:a:palletsprojects:jinja:*", - - # project_name = "MarkupSafe", - # project_url = "https://markupsafe.palletsprojects.com/en/1.1.x/", - # version = "1.1.1", - # release_date = "2019-02-23" - # use_category = ["test"], - ) - pip_install( - name = "protodoc_pip3", - requirements = "@envoy//tools/protodoc:requirements.txt", - extra_pip_args = ["--require-hashes"], - - # project_name = "PyYAML", - # project_url = "https://github.com/yaml/pyyaml", - # version = "5.4.1", - # release_date = "2021-01-20" - # use_category = ["docs"], - # cpe = "cpe:2.3:a:pyyaml:pyyaml:*", - ) - pip_install( - name = "pylint_pip3", - requirements = "@envoy//tools/code_format:requirements.txt", - extra_pip_args = ["--require-hashes"], - ) - pip_install( - name = "testing_pip3", - requirements = "@envoy//tools/testing:requirements.txt", - extra_pip_args = ["--require-hashes"], ) pip_install( name = "thrift_pip3", requirements = "@envoy//test/extensions/filters/network/thrift_proxy:requirements.txt", extra_pip_args = ["--require-hashes"], - - # project_name = "Apache Thrift", - # project_url = "http://thrift.apache.org/", - # version = "0.11.0", - # release_date = "2017-12-07" - # use_category = 
["test"], - # cpe = "cpe:2.3:a:apache:thrift:*", - - # project_name = "Six: Python 2 and 3 Compatibility Library", - # project_url = "https://six.readthedocs.io/", - # version = "1.15.0", - # release_date = "2020-05-21" - # use_category = ["test"], ) pip_install( name = "fuzzing_pip3", requirements = "@rules_fuzzing//fuzzing:requirements.txt", extra_pip_args = ["--require-hashes"], - - # project_name = "Abseil Python Common Libraries", - # project_url = "https://github.com/abseil/abseil-py", - # version = "0.11.0", - # release_date = "2020-10-27", - # use_category = ["test"], - - # project_name = "Six: Python 2 and 3 Compatibility Library", - # project_url = "https://six.readthedocs.io/", - # version = "1.15.0", - # release_date = "2020-05-21" - # use_category = ["test"], ) # Envoy deps that rely on a first stage of dependency loading in envoy_dependencies(). diff --git a/bazel/repository_locations.bzl b/bazel/repository_locations.bzl index b817e6efffab7..e6aa827110114 100644 --- a/bazel/repository_locations.bzl +++ b/bazel/repository_locations.bzl @@ -675,10 +675,11 @@ REPOSITORY_LOCATIONS_SPEC = dict( project_name = "Python rules for Bazel", project_desc = "Bazel rules for the Python language", project_url = "https://github.com/bazelbuild/rules_python", - version = "0.3.0", - sha256 = "934c9ceb552e84577b0faf1e5a2f0450314985b4d8712b2b70717dc679fdc01b", - release_date = "2021-06-23", - urls = ["https://github.com/bazelbuild/rules_python/releases/download/{version}/rules_python-{version}.tar.gz"], + version = "9f597623ccfbe430b0d81c82498e33b80b7aec88", + sha256 = "8d61fed6974f1e69e09243ca78c9ecf82f50fa3de64bb5df6b0b9061f9c9639b", + release_date = "2021-09-07", + strip_prefix = "rules_python-{version}", + urls = ["https://github.com/bazelbuild/rules_python/archive/{version}.tar.gz"], use_category = ["build"], ), rules_pkg = dict( @@ -935,7 +936,7 @@ REPOSITORY_LOCATIONS_SPEC = dict( strip_prefix = "kafka-{version}/clients/src/main/resources/common/message", urls = 
["https://github.com/apache/kafka/archive/{version}.zip"], use_category = ["dataplane_ext"], - extensions = ["envoy.filters.network.kafka_broker"], + extensions = ["envoy.filters.network.kafka_broker", "envoy.filters.network.kafka_mesh"], release_date = "2020-03-03", cpe = "cpe:2.3:a:apache:kafka:*", ), @@ -948,7 +949,7 @@ REPOSITORY_LOCATIONS_SPEC = dict( strip_prefix = "librdkafka-{version}", urls = ["https://github.com/edenhill/librdkafka/archive/v{version}.tar.gz"], use_category = ["dataplane_ext"], - extensions = ["envoy.filters.network.kafka_broker"], + extensions = ["envoy.filters.network.kafka_mesh"], release_date = "2021-05-10", cpe = "N/A", ), diff --git a/bazel/rules_python.patch b/bazel/rules_python.patch deleted file mode 100644 index 205998745d576..0000000000000 --- a/bazel/rules_python.patch +++ /dev/null @@ -1,15 +0,0 @@ -diff --git a/python/pip_install/repositories.bzl b/python/pip_install/repositories.bzl -index 302ff0e..c40deae 100644 ---- a/python/pip_install/repositories.bzl -+++ b/python/pip_install/repositories.bzl -@@ -26,8 +26,8 @@ _RULE_DEPS = [ - ), - ( - "pypi__setuptools", -- "https://files.pythonhosted.org/packages/ab/b5/3679d7c98be5b65fa5522671ef437b792d909cf3908ba54fe9eca5d2a766/setuptools-44.1.0-py2.py3-none-any.whl", -- "992728077ca19db6598072414fb83e0a284aca1253aaf2e24bb1e55ee6db1a30", -+ "https://files.pythonhosted.org/packages/70/06/849cc805ac6332210083f2114a95b22ee252ce81ed4e1be4f1d2b87c9108/setuptools-54.0.0-py3-none-any.whl", -+ "d85b57c41e88b69ab87065c964134ec85b7573cbab0fdaa7ef32330ed764600a", - ), - ( - "pypi__wheel", diff --git a/ci/Dockerfile-envoy-alpine b/ci/Dockerfile-envoy-alpine index 36a23f6e3a2aa..ae0e57cdb9e96 100644 --- a/ci/Dockerfile-envoy-alpine +++ b/ci/Dockerfile-envoy-alpine @@ -1,4 +1,4 @@ -FROM frolvlad/alpine-glibc:alpine-3.12_glibc-2.31 +FROM frolvlad/alpine-glibc:alpine-3.14_glibc-2.33 RUN mkdir -p /etc/envoy ADD configs/envoyproxy_io_proxy.yaml /etc/envoy/envoy.yaml diff --git 
a/ci/Dockerfile-envoy-distroless b/ci/Dockerfile-envoy-distroless index d2647f7b38557..40c2257e5b4bb 100644 --- a/ci/Dockerfile-envoy-distroless +++ b/ci/Dockerfile-envoy-distroless @@ -1,4 +1,4 @@ -FROM gcr.io/distroless/base-debian10:nonroot +FROM gcr.io/distroless/base-debian11:nonroot ADD configs/envoyproxy_io_proxy.yaml /etc/envoy/envoy.yaml diff --git a/ci/docker_ci.sh b/ci/docker_ci.sh index 7fb99271c63cc..9eb97f75afe62 100755 --- a/ci/docker_ci.sh +++ b/ci/docker_ci.sh @@ -147,7 +147,7 @@ for BUILD_TYPE in "${BUILD_TYPES[@]}"; do build_images "${BUILD_TYPE}" "$image_tag" if ! is_windows; then - if [[ "$BUILD_TYPE" == "" || "$BUILD_TYPE" == "-contrib" || "$BUILD_TYPE" == "-alpine" ]]; then + if [[ "$BUILD_TYPE" == "" || "$BUILD_TYPE" == "-contrib" || "$BUILD_TYPE" == "-alpine" || "$BUILD_TYPE" == "-distroless" ]]; then # verify_examples expects the base and alpine images, and for them to be named `-dev` dev_image="envoyproxy/envoy${BUILD_TYPE}-dev:latest" docker tag "$image_tag" "$dev_image" diff --git a/ci/flaky_test/requirements.in b/ci/flaky_test/requirements.in new file mode 100644 index 0000000000000..e5704ffe9fbe6 --- /dev/null +++ b/ci/flaky_test/requirements.in @@ -0,0 +1 @@ +slackclient diff --git a/configs/BUILD b/configs/BUILD index 80583f0aa32fb..71e3fa5f9774a 100644 --- a/configs/BUILD +++ b/configs/BUILD @@ -20,8 +20,7 @@ py_binary( "*.yaml", ]), deps = [ - requirement("Jinja2"), - requirement("MarkupSafe"), + requirement("jinja2"), ], ) diff --git a/contrib/contrib_build_config.bzl b/contrib/contrib_build_config.bzl index 34ef00af9fd15..f27001d971be8 100644 --- a/contrib/contrib_build_config.bzl +++ b/contrib/contrib_build_config.bzl @@ -12,6 +12,7 @@ CONTRIB_EXTENSIONS = { # "envoy.filters.network.kafka_broker": "//contrib/kafka/filters/network/source:kafka_broker_config_lib", + "envoy.filters.network.kafka_mesh": "//contrib/kafka/filters/network/source/mesh:config_lib", "envoy.filters.network.mysql_proxy": 
"//contrib/mysql_proxy/filters/network/source:config", "envoy.filters.network.postgres_proxy": "//contrib/postgres_proxy/filters/network/source:config", "envoy.filters.network.rocketmq_proxy": "//contrib/rocketmq_proxy/filters/network/source:config", diff --git a/contrib/extensions_metadata.yaml b/contrib/extensions_metadata.yaml index c3ccc61e53ee1..8614d2dbddb83 100644 --- a/contrib/extensions_metadata.yaml +++ b/contrib/extensions_metadata.yaml @@ -13,6 +13,11 @@ envoy.filters.network.kafka_broker: - envoy.filters.network security_posture: requires_trusted_downstream_and_upstream status: wip +envoy.filters.network.kafka_mesh: + categories: + - envoy.filters.network + security_posture: requires_trusted_downstream_and_upstream + status: wip envoy.filters.network.rocketmq_proxy: categories: - envoy.filters.network diff --git a/contrib/kafka/filters/network/source/BUILD b/contrib/kafka/filters/network/source/BUILD index ec196b5e9abe2..b90a1598249e5 100644 --- a/contrib/kafka/filters/network/source/BUILD +++ b/contrib/kafka/filters/network/source/BUILD @@ -5,7 +5,7 @@ load( "envoy_contrib_package", ) load("@rules_python//python:defs.bzl", "py_binary", "py_library") -load("@kafka_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") licenses(["notice"]) # Apache 2 diff --git a/contrib/kafka/filters/network/source/mesh/BUILD b/contrib/kafka/filters/network/source/mesh/BUILD index fe24168a884b0..f457afee713ea 100644 --- a/contrib/kafka/filters/network/source/mesh/BUILD +++ b/contrib/kafka/filters/network/source/mesh/BUILD @@ -1,5 +1,6 @@ load( "//bazel:envoy_build_system.bzl", + "envoy_cc_contrib_extension", "envoy_cc_library", "envoy_contrib_package", ) @@ -10,6 +11,25 @@ licenses(["notice"]) # Apache 2 envoy_contrib_package() # Kafka-mesh network filter. 
+# Mesh filter public docs: docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst + +envoy_cc_contrib_extension( + name = "config_lib", + srcs = ["config.cc"], + hdrs = ["config.h"], + deps = [ + "//envoy/registry", + "//source/extensions/filters/network/common:factory_base_lib", + "@envoy_api//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg_cc_proto", + ] + select({ + "//bazel:windows": [], + "//conditions:default": [ + ":filter_lib", + ":upstream_config_lib", + ":upstream_kafka_facade_lib", + ], + }), +) envoy_cc_library( name = "filter_lib", @@ -121,11 +141,15 @@ envoy_cc_library( envoy_cc_library( name = "upstream_config_lib", srcs = [ + "upstream_config.cc", ], hdrs = [ "upstream_config.h", ], tags = ["skip_on_windows"], deps = [ + "//source/common/common:assert_lib", + "//source/common/common:minimal_logger_lib", + "@envoy_api//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg_cc_proto", ], ) diff --git a/contrib/kafka/filters/network/source/mesh/config.cc b/contrib/kafka/filters/network/source/mesh/config.cc new file mode 100644 index 0000000000000..7c2a1f4e2474c --- /dev/null +++ b/contrib/kafka/filters/network/source/mesh/config.cc @@ -0,0 +1,55 @@ +#include "contrib/kafka/filters/network/source/mesh/config.h" + +#include "envoy/registry/registry.h" +#include "envoy/server/filter_config.h" +#include "envoy/stats/scope.h" + +#ifndef WIN32 +#include "contrib/kafka/filters/network/source/mesh/upstream_config.h" +#include "contrib/kafka/filters/network/source/mesh/upstream_kafka_facade.h" +#include "contrib/kafka/filters/network/source/mesh/filter.h" +#else +#include "envoy/common/exception.h" +#endif + +namespace Envoy { +namespace Extensions { +namespace NetworkFilters { +namespace Kafka { +namespace Mesh { + +// The mesh filter doesn't do anything special, it just sets up the shared entities. +// Any extra configuration validation is done in UpstreamKafkaConfiguration constructor. 
+Network::FilterFactoryCb KafkaMeshConfigFactory::createFilterFactoryFromProtoTyped( + const KafkaMeshProtoConfig& config, Server::Configuration::FactoryContext& context) { + +#ifdef WIN32 + throw EnvoyException("Kafka mesh filter is not supported on Windows"); +#else + // Shared configuration (tells us where the upstream clusters are). + const UpstreamKafkaConfigurationSharedPtr configuration = + std::make_shared(config); + + // Shared upstream facade (connects us to upstream Kafka clusters). + const UpstreamKafkaFacadeSharedPtr upstream_kafka_facade = + std::make_shared(*configuration, context.threadLocal(), + context.api().threadFactory()); + + return [configuration, upstream_kafka_facade](Network::FilterManager& filter_manager) -> void { + Network::ReadFilterSharedPtr filter = + std::make_shared(*configuration, *upstream_kafka_facade); + filter_manager.addReadFilter(filter); + }; +#endif +} + +/** + * Static registration for the Kafka filter. @see RegisterFactory. + */ +REGISTER_FACTORY(KafkaMeshConfigFactory, Server::Configuration::NamedNetworkFilterConfigFactory); + +} // namespace Mesh +} // namespace Kafka +} // namespace NetworkFilters +} // namespace Extensions +} // namespace Envoy diff --git a/contrib/kafka/filters/network/source/mesh/config.h b/contrib/kafka/filters/network/source/mesh/config.h new file mode 100644 index 0000000000000..12ba71691bbca --- /dev/null +++ b/contrib/kafka/filters/network/source/mesh/config.h @@ -0,0 +1,33 @@ +#pragma once + +#include "source/extensions/filters/network/common/factory_base.h" + +#include "contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.pb.h" +#include "contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.pb.validate.h" + +namespace Envoy { +namespace Extensions { +namespace NetworkFilters { +namespace Kafka { +namespace Mesh { + +using KafkaMeshProtoConfig = envoy::extensions::filters::network::kafka_mesh::v3alpha::KafkaMesh; + +/** + * Config registration for the Kafka 
mesh filter. + */ +class KafkaMeshConfigFactory : public Common::FactoryBase { +public: + KafkaMeshConfigFactory() : FactoryBase("envoy.filters.network.kafka_mesh", true) {} + +private: + Network::FilterFactoryCb + createFilterFactoryFromProtoTyped(const KafkaMeshProtoConfig& config, + Server::Configuration::FactoryContext& context) override; +}; + +} // namespace Mesh +} // namespace Kafka +} // namespace NetworkFilters +} // namespace Extensions +} // namespace Envoy diff --git a/contrib/kafka/filters/network/source/mesh/upstream_config.cc b/contrib/kafka/filters/network/source/mesh/upstream_config.cc new file mode 100644 index 0000000000000..8e6917df034c8 --- /dev/null +++ b/contrib/kafka/filters/network/source/mesh/upstream_config.cc @@ -0,0 +1,93 @@ +#include "contrib/kafka/filters/network/source/mesh/upstream_config.h" + +#include "envoy/common/exception.h" + +#include "source/common/common/assert.h" + +#include "absl/strings/str_cat.h" + +namespace Envoy { +namespace Extensions { +namespace NetworkFilters { +namespace Kafka { +namespace Mesh { + +using KafkaClusterDefinition = + envoy::extensions::filters::network::kafka_mesh::v3alpha::KafkaClusterDefinition; +using ForwardingRule = envoy::extensions::filters::network::kafka_mesh::v3alpha::ForwardingRule; + +UpstreamKafkaConfigurationImpl::UpstreamKafkaConfigurationImpl(const KafkaMeshProtoConfig& config) + : advertised_address_{config.advertised_host(), config.advertised_port()} { + + // Processing cluster data. + const auto& upstream_clusters = config.upstream_clusters(); + if (upstream_clusters.empty()) { + throw EnvoyException("kafka-mesh filter needs to have at least one upstream Kafka cluster"); + } + + // Processing cluster configuration. + std::map cluster_name_to_cluster_config; + for (const auto& upstream_cluster_definition : upstream_clusters) { + const std::string& cluster_name = upstream_cluster_definition.cluster_name(); + + // No duplicates are allowed. 
+ if (cluster_name_to_cluster_config.find(cluster_name) != cluster_name_to_cluster_config.end()) { + throw EnvoyException( + absl::StrCat("kafka-mesh filter has multiple Kafka clusters referenced by the same name", + cluster_name)); + } + + // Upstream client configuration - use all the optional custom configs provided, and then use + // the target IPs. + std::map producer_configs = { + upstream_cluster_definition.producer_config().begin(), + upstream_cluster_definition.producer_config().end()}; + producer_configs["bootstrap.servers"] = upstream_cluster_definition.bootstrap_servers(); + ClusterConfig cluster_config = {cluster_name, upstream_cluster_definition.partition_count(), + producer_configs}; + cluster_name_to_cluster_config[cluster_name] = cluster_config; + } + + // Processing forwarding rules. + const auto& forwarding_rules = config.forwarding_rules(); + if (forwarding_rules.empty()) { + throw EnvoyException("kafka-mesh filter needs to have at least one forwarding rule"); + } + + for (const auto& rule : forwarding_rules) { + const std::string& target_cluster = rule.target_cluster(); + ASSERT(rule.trigger_case() == ForwardingRule::TriggerCase::kTopicPrefix); + ENVOY_LOG(trace, "Setting up forwarding rule: {} -> {}", rule.topic_prefix(), target_cluster); + // Each forwarding rule needs to reference a cluster. + if (cluster_name_to_cluster_config.find(target_cluster) == + cluster_name_to_cluster_config.end()) { + throw EnvoyException(absl::StrCat( + "kafka-mesh filter forwarding rule is referencing unknown upstream Kafka cluster: ", + target_cluster)); + } + topic_prefix_to_cluster_config_[rule.topic_prefix()] = + cluster_name_to_cluster_config[target_cluster]; + } +} + +absl::optional +UpstreamKafkaConfigurationImpl::computeClusterConfigForTopic(const std::string& topic) const { + // We find the first matching prefix (this is why ordering is important). 
+ for (const auto& it : topic_prefix_to_cluster_config_) { + if (topic.rfind(it.first, 0) == 0) { + const ClusterConfig cluster_config = it.second; + return absl::make_optional(cluster_config); + } + } + return absl::nullopt; +} + +std::pair UpstreamKafkaConfigurationImpl::getAdvertisedAddress() const { + return advertised_address_; +} + +} // namespace Mesh +} // namespace Kafka +} // namespace NetworkFilters +} // namespace Extensions +} // namespace Envoy diff --git a/contrib/kafka/filters/network/source/mesh/upstream_config.h b/contrib/kafka/filters/network/source/mesh/upstream_config.h index 00e3e7faf32da..ad49f2f1304d6 100644 --- a/contrib/kafka/filters/network/source/mesh/upstream_config.h +++ b/contrib/kafka/filters/network/source/mesh/upstream_config.h @@ -7,7 +7,11 @@ #include "envoy/common/pure.h" +#include "source/common/common/logger.h" + #include "absl/types/optional.h" +#include "contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.pb.h" +#include "contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.pb.validate.h" namespace Envoy { namespace Extensions { @@ -15,6 +19,8 @@ namespace NetworkFilters { namespace Kafka { namespace Mesh { +using KafkaMeshProtoConfig = envoy::extensions::filters::network::kafka_mesh::v3alpha::KafkaMesh; + // Minor helper structure that contains information about upstream Kafka clusters. struct ClusterConfig { @@ -32,23 +38,52 @@ struct ClusterConfig { // This map always contains entry with key 'bootstrap.servers', as this is the only mandatory // producer property. std::map upstream_producer_properties_; + + bool operator==(const ClusterConfig& rhs) const { + return name_ == rhs.name_ && partition_count_ == rhs.partition_count_ && + upstream_producer_properties_ == rhs.upstream_producer_properties_; + } }; /** * Keeps the configuration related to upstream Kafka clusters. 
- * Impl note: current matching from topic to cluster is based on prefix matching but more complex - * rules could be added. */ class UpstreamKafkaConfiguration { public: virtual ~UpstreamKafkaConfiguration() = default; + + // Return this the host-port pair that's provided to Kafka clients. + // This value needs to follow same rules as 'advertised.address' property of Kafka broker. + virtual std::pair getAdvertisedAddress() const PURE; + + // Provides cluster for given Kafka topic, according to the rules contained within this + // configuration object. virtual absl::optional computeClusterConfigForTopic(const std::string& topic) const PURE; - virtual std::pair getAdvertisedAddress() const PURE; }; using UpstreamKafkaConfigurationSharedPtr = std::shared_ptr; +/** + * Implementation that uses only topic-prefix to figure out which Kafka cluster to use. + */ +class UpstreamKafkaConfigurationImpl : public UpstreamKafkaConfiguration, + private Logger::Loggable { +public: + UpstreamKafkaConfigurationImpl(const KafkaMeshProtoConfig& config); + + // UpstreamKafkaConfiguration + absl::optional + computeClusterConfigForTopic(const std::string& topic) const override; + + // UpstreamKafkaConfiguration + std::pair getAdvertisedAddress() const override; + +private: + const std::pair advertised_address_; + std::map topic_prefix_to_cluster_config_; +}; + } // namespace Mesh } // namespace Kafka } // namespace NetworkFilters diff --git a/contrib/kafka/filters/network/source/requirements.txt b/contrib/kafka/filters/network/source/requirements.txt deleted file mode 100644 index 1cd69909b9962..0000000000000 --- a/contrib/kafka/filters/network/source/requirements.txt +++ /dev/null @@ -1,38 +0,0 @@ -Jinja2==3.0.1 \ - --hash=sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4 \ - --hash=sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4 -MarkupSafe==2.0.1 \ - --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \ - 
--hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \ - --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \ - --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \ - --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872 \ - --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \ - --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \ - --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \ - --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \ - --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \ - --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \ - --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \ - --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \ - --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \ - --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \ - --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \ - --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \ - --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \ - --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \ - --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \ - --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \ - --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \ - --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \ - --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \ - --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \ - 
--hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \ - --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \ - --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \ - --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \ - --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \ - --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \ - --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \ - --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \ - --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a diff --git a/contrib/kafka/filters/network/test/BUILD b/contrib/kafka/filters/network/test/BUILD index 4c620dcfe43b9..93b2d2e35a29b 100644 --- a/contrib/kafka/filters/network/test/BUILD +++ b/contrib/kafka/filters/network/test/BUILD @@ -5,7 +5,7 @@ load( "envoy_contrib_package", ) load("@rules_python//python:defs.bzl", "py_binary") -load("@kafka_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") licenses(["notice"]) # Apache 2 diff --git a/contrib/kafka/filters/network/test/broker/integration_test/BUILD b/contrib/kafka/filters/network/test/broker/integration_test/BUILD index 080c2a21a3805..cb9410485692c 100644 --- a/contrib/kafka/filters/network/test/broker/integration_test/BUILD +++ b/contrib/kafka/filters/network/test/broker/integration_test/BUILD @@ -3,7 +3,7 @@ load( "envoy_contrib_package", "envoy_py_test", ) -load("@kafka_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") licenses(["notice"]) # Apache 2 diff --git a/contrib/kafka/filters/network/test/mesh/BUILD b/contrib/kafka/filters/network/test/mesh/BUILD index acff686d9e163..bc6e740f08842 100644 --- a/contrib/kafka/filters/network/test/mesh/BUILD +++ 
b/contrib/kafka/filters/network/test/mesh/BUILD @@ -13,6 +13,16 @@ licenses(["notice"]) # Apache 2 envoy_contrib_package() +envoy_cc_test( + name = "config_unit_test", + srcs = ["config_unit_test.cc"], + tags = ["skip_on_windows"], + deps = [ + "//contrib/kafka/filters/network/source/mesh:config_lib", + "//test/mocks/server:factory_context_mocks", + ], +) + envoy_cc_test( name = "filter_unit_test", srcs = ["filter_unit_test.cc"], @@ -73,3 +83,12 @@ envoy_cc_test_library( envoy_external_dep_path("librdkafka"), ], ) + +envoy_cc_test( + name = "upstream_config_unit_test", + srcs = ["upstream_config_unit_test.cc"], + tags = ["skip_on_windows"], + deps = [ + "//contrib/kafka/filters/network/source/mesh:upstream_config_lib", + ], +) diff --git a/contrib/kafka/filters/network/test/mesh/config_unit_test.cc b/contrib/kafka/filters/network/test/mesh/config_unit_test.cc new file mode 100644 index 0000000000000..3ac2ad70a64e6 --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/config_unit_test.cc @@ -0,0 +1,81 @@ +#include "test/mocks/server/factory_context.h" + +#include "contrib/kafka/filters/network/source/mesh/config.h" +#include "gmock/gmock.h" +#include "gtest/gtest.h" + +namespace Envoy { +namespace Extensions { +namespace NetworkFilters { +namespace Kafka { +namespace Mesh { + +class MockThreadFactory : public Thread::ThreadFactory { +public: + MOCK_METHOD(Thread::ThreadPtr, createThread, (std::function, Thread::OptionsOptConstRef)); + MOCK_METHOD(Thread::ThreadId, currentThreadId, ()); +}; + +TEST(KafkaMeshConfigFactoryUnitTest, shouldCreateFilter) { + // given + const std::string yaml = R"EOF( +advertised_host: "127.0.0.1" +advertised_port: 19092 +upstream_clusters: +- cluster_name: kafka_c1 + bootstrap_servers: 127.0.0.1:9092 + partition_count: 1 +- cluster_name: kafka_c2 + bootstrap_servers: 127.0.0.1:9093 + partition_count: 1 +- cluster_name: kafka_c3 + bootstrap_servers: 127.0.0.1:9094 + partition_count: 5 + producer_config: + acks: "1" + linger.ms: "500" 
+forwarding_rules: +- target_cluster: kafka_c1 + topic_prefix: apples +- target_cluster: kafka_c2 + topic_prefix: bananas +- target_cluster: kafka_c3 + topic_prefix: cherries + )EOF"; + + KafkaMeshProtoConfig proto_config; + TestUtility::loadFromYamlAndValidate(yaml, proto_config); + + testing::NiceMock context; + testing::NiceMock thread_factory; + ON_CALL(context.api_, threadFactory()).WillByDefault(ReturnRef(thread_factory)); + KafkaMeshConfigFactory factory; + + Network::FilterFactoryCb cb = factory.createFilterFactoryFromProto(proto_config, context); + Network::MockConnection connection; + EXPECT_CALL(connection, addReadFilter(_)); + + // when + cb(connection); + + // then - connection had `addFilter` invoked +} + +TEST(KafkaMeshConfigFactoryUnitTest, throwsIfAdvertisedPortIsMissing) { + // given + const std::string yaml = R"EOF( +advertised_host: "127.0.0.1" + )EOF"; + + KafkaMeshProtoConfig proto_config; + + // when + // then - exception gets thrown + EXPECT_THROW(TestUtility::loadFromYamlAndValidate(yaml, proto_config), ProtoValidationException); +} + +} // namespace Mesh +} // namespace Kafka +} // namespace NetworkFilters +} // namespace Extensions +} // namespace Envoy diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/BUILD b/contrib/kafka/filters/network/test/mesh/integration_test/BUILD new file mode 100644 index 0000000000000..449098f15e15f --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/integration_test/BUILD @@ -0,0 +1,31 @@ +load( + "//bazel:envoy_build_system.bzl", + "envoy_contrib_package", + "envoy_py_test", +) +load("@base_pip3//:requirements.bzl", "requirement") + +licenses(["notice"]) # Apache 2 + +envoy_contrib_package() + +# This test sets up multiple services, and this can take variable amount of time (30-60 seconds). 
+envoy_py_test( + name = "kafka_mesh_integration_test", + srcs = [ + "kafka_mesh_integration_test.py", + "@kafka_python_client//:all", + ], + data = [ + "//contrib/exe:envoy-static", + "//bazel:remote_jdk11", + "@kafka_server_binary//:all", + ] + glob(["*.j2"]), + flaky = True, + python_version = "PY3", + srcs_version = "PY3", + deps = [ + requirement("Jinja2"), + requirement("MarkupSafe"), + ], +) diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/envoy_config_yaml.j2 b/contrib/kafka/filters/network/test/mesh/integration_test/envoy_config_yaml.j2 new file mode 100644 index 0000000000000..fbb22d2af3a96 --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/integration_test/envoy_config_yaml.j2 @@ -0,0 +1,34 @@ +static_resources: + listeners: + - address: + socket_address: + address: 127.0.0.1 + port_value: {{ data['kafka_envoy_port'] }} + filter_chains: + - filters: + - name: requesttypes + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.kafka_broker.v3.KafkaBroker + stat_prefix: testfilter + - name: mesh + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.kafka_mesh.v3alpha.KafkaMesh + advertised_host: "127.0.0.1" + advertised_port: {{ data['kafka_envoy_port'] }} + upstream_clusters: + - cluster_name: kafka_c1 + bootstrap_servers: 127.0.0.1:{{ data['kafka_real_port1'] }} + partition_count: 1 + - cluster_name: kafka_c2 + bootstrap_servers: 127.0.0.1:{{ data['kafka_real_port2'] }} + partition_count: 1 + forwarding_rules: + - target_cluster: kafka_c1 + topic_prefix: a + - target_cluster: kafka_c2 + topic_prefix: b +admin: + access_log_path: /dev/null + profile_path: /dev/null + address: + socket_address: { address: 127.0.0.1, port_value: {{ data['envoy_monitoring_port'] }} } diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/kafka_mesh_integration_test.py b/contrib/kafka/filters/network/test/mesh/integration_test/kafka_mesh_integration_test.py new file mode 100644 
index 0000000000000..f21145cda99a4 --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/integration_test/kafka_mesh_integration_test.py @@ -0,0 +1,650 @@ +#!/usr/bin/python + +import random +import os +import shutil +import socket +import subprocess +import tempfile +from threading import Thread, Semaphore +import time +import unittest +import random + +from kafka import KafkaConsumer, KafkaProducer, TopicPartition +import urllib.request + + +class IntegrationTest(unittest.TestCase): + """ + All tests in this class depend on Envoy/Zookeeper/Kafka running. + For each of these tests we are going to create Kafka producers and consumers, with producers + pointing to Envoy (so the records get forwarded to target Kafka clusters) and verifying consumers + pointing to Kafka clusters directly (as mesh filter does not yet support Fetch requests). + We expect every operation to succeed (as they should reach Kafka) and the corresponding metrics + to increase on Envoy side (to show that messages were received and forwarded successfully). + """ + + services = None + + @classmethod + def setUpClass(cls): + IntegrationTest.services = ServicesHolder() + IntegrationTest.services.start() + + @classmethod + def tearDownClass(cls): + IntegrationTest.services.shut_down() + + def setUp(self): + # We want to check if our services are okay before running any kind of test. + IntegrationTest.services.check_state() + self.metrics = MetricsHolder(self) + + def tearDown(self): + # We want to check if our services are okay after running any test. 
+ IntegrationTest.services.check_state() + + @classmethod + def kafka_envoy_address(cls): + return '127.0.0.1:%s' % IntegrationTest.services.kafka_envoy_port + + @classmethod + def kafka_cluster1_address(cls): + return '127.0.0.1:%s' % IntegrationTest.services.kafka_real_port1 + + @classmethod + def kafka_cluster2_address(cls): + return '127.0.0.1:%s' % IntegrationTest.services.kafka_real_port2 + + @classmethod + def envoy_stats_address(cls): + return 'http://127.0.0.1:%s/stats' % IntegrationTest.services.envoy_monitoring_port + + def test_producing(self): + """ + This test verifies that producer can send messages through mesh filter. + We are going to send messages to two topics: 'apples' and 'bananas'. + The mesh filter is configured to forward records for topics starting with 'a' (like 'apples') + to the first cluster, and the ones starting with 'b' (so 'bananas') to the second one. + + We are going to send messages one by one, so they will not be batched in Kafka producer, + so the filter is going to receive them one by one too. + + After sending, the consumers are going to read from Kafka clusters directly to make sure that + nothing was lost. 
+ """ + + messages_to_send = 100 + partition1 = TopicPartition('apples', 0) + partition2 = TopicPartition('bananas', 0) + + producer = KafkaProducer( + bootstrap_servers=IntegrationTest.kafka_envoy_address(), api_version=(1, 0, 0)) + offset_to_payload1 = {} + offset_to_payload2 = {} + for _ in range(messages_to_send): + payload = bytearray(random.getrandbits(8) for _ in range(5)) + future1 = producer.send( + value=payload, topic=partition1.topic, partition=partition1.partition) + self.assertTrue(future1.get().offset >= 0) + offset_to_payload1[future1.get().offset] = payload + + future2 = producer.send( + value=payload, topic=partition2.topic, partition=partition2.partition) + self.assertTrue(future2.get().offset >= 0) + offset_to_payload2[future2.get().offset] = payload + self.assertTrue(len(offset_to_payload1) == messages_to_send) + self.assertTrue(len(offset_to_payload2) == messages_to_send) + producer.close() + + # Check the target clusters. + self.__verify_target_kafka_cluster( + IntegrationTest.kafka_cluster1_address(), partition1, offset_to_payload1, partition2) + self.__verify_target_kafka_cluster( + IntegrationTest.kafka_cluster2_address(), partition2, offset_to_payload2, partition1) + + # Check if requests have been received. + self.metrics.collect_final_metrics() + self.metrics.assert_metric_increase('produce', 200) + + def test_producing_with_batched_records(self): + """ + Compared to previous test, we are going to have batching in Kafka producers (this is caused by high 'linger.ms' value). + So a single request that reaches a Kafka broker might be carrying more than one record, for different partitions. + """ + messages_to_send = 100 + partition1 = TopicPartition('apricots', 0) + partition2 = TopicPartition('berries', 0) + + # This ensures that records to 'apricots' and 'berries' partitions. 
+ producer = KafkaProducer( + bootstrap_servers=IntegrationTest.kafka_envoy_address(), + api_version=(1, 0, 0), + linger_ms=1000, + batch_size=100) + future_to_payload1 = {} + future_to_payload2 = {} + for _ in range(messages_to_send): + payload = bytearray(random.getrandbits(8) for _ in range(5)) + future1 = producer.send( + value=payload, topic=partition1.topic, partition=partition1.partition) + future_to_payload1[future1] = payload + + payload = bytearray(random.getrandbits(8) for _ in range(5)) + future2 = producer.send( + value=payload, topic=partition2.topic, partition=partition2.partition) + future_to_payload2[future2] = payload + + offset_to_payload1 = {} + offset_to_payload2 = {} + for future in future_to_payload1.keys(): + offset_to_payload1[future.get().offset] = future_to_payload1[future] + self.assertTrue(future.get().offset >= 0) + for future in future_to_payload2.keys(): + offset_to_payload2[future.get().offset] = future_to_payload2[future] + self.assertTrue(future.get().offset >= 0) + self.assertTrue(len(offset_to_payload1) == messages_to_send) + self.assertTrue(len(offset_to_payload2) == messages_to_send) + producer.close() + + # Check the target clusters. + self.__verify_target_kafka_cluster( + IntegrationTest.kafka_cluster1_address(), partition1, offset_to_payload1, partition2) + self.__verify_target_kafka_cluster( + IntegrationTest.kafka_cluster2_address(), partition2, offset_to_payload2, partition1) + + # Check if requests have been received. + self.metrics.collect_final_metrics() + self.metrics.assert_metric_increase('produce', 1) + + def __verify_target_kafka_cluster( + self, bootstrap_servers, partition, offset_to_payload_map, other_partition): + # Check if records were properly forwarded to the cluster. 
+ consumer = KafkaConsumer(bootstrap_servers=bootstrap_servers, auto_offset_reset='earliest') + consumer.assign([partition]) + received_messages = [] + while (len(received_messages) < len(offset_to_payload_map)): + poll_result = consumer.poll(timeout_ms=1000) + received_messages += poll_result[partition] + self.assertTrue(len(received_messages) == len(offset_to_payload_map)) + for record in received_messages: + self.assertTrue(record.value == offset_to_payload_map[record.offset]) + + # Check that no records were incorrectly routed from the "other" partition (they would have created the topics). + self.assertTrue(other_partition.topic not in consumer.topics()) + consumer.close(False) + + +class MetricsHolder: + """ + Utility for storing Envoy metrics. + Expected to be created before the test (to get initial metrics), and then to collect them at the + end of test, so the expected increases can be verified. + """ + + def __init__(self, owner): + self.owner = owner + self.initial_requests, self.inital_responses = MetricsHolder.get_envoy_stats() + self.final_requests = None + self.final_responses = None + + def collect_final_metrics(self): + self.final_requests, self.final_responses = MetricsHolder.get_envoy_stats() + + def assert_metric_increase(self, message_type, count): + request_type = message_type + '_request' + response_type = message_type + '_response' + + initial_request_value = self.initial_requests.get(request_type, 0) + final_request_value = self.final_requests.get(request_type, 0) + self.owner.assertGreaterEqual(final_request_value, initial_request_value + count) + + initial_response_value = self.inital_responses.get(response_type, 0) + final_response_value = self.final_responses.get(response_type, 0) + self.owner.assertGreaterEqual(final_response_value, initial_response_value + count) + + @staticmethod + def get_envoy_stats(): + """ + Grab request/response metrics from envoy's stats interface. 
+ """ + + stats_url = IntegrationTest.envoy_stats_address() + requests = {} + responses = {} + with urllib.request.urlopen(stats_url) as remote_metrics_url: + payload = remote_metrics_url.read().decode() + lines = payload.splitlines() + for line in lines: + request_prefix = 'kafka.testfilter.request.' + response_prefix = 'kafka.testfilter.response.' + if line.startswith(request_prefix): + data = line[len(request_prefix):].split(': ') + requests[data[0]] = int(data[1]) + pass + if line.startswith(response_prefix) and '_response:' in line: + data = line[len(response_prefix):].split(': ') + responses[data[0]] = int(data[1]) + return [requests, responses] + + +class ServicesHolder: + """ + Utility class for setting up our external dependencies: Envoy, Zookeeper + and two Kafka clusters (single-broker each). + """ + + def __init__(self): + self.kafka_tmp_dir = None + + self.envoy_worker = None + self.zk_worker = None + self.kafka_workers = None + + @staticmethod + def get_random_listener_port(): + """ + Here we count on OS to give us some random socket. + Obviously this method will need to be invoked in a try loop anyways, as in degenerate scenario + someone else might have bound to it after we had closed the socket and before the service + that's supposed to use it binds to it. + """ + + import socket + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server_socket: + server_socket.bind(('0.0.0.0', 0)) + socket_port = server_socket.getsockname()[1] + print('returning %s' % socket_port) + return socket_port + + def start(self): + """ + Starts all the services we need for integration tests. + """ + + # Find java installation that we are going to use to start Zookeeper & Kafka. 
+ java_directory = ServicesHolder.find_java() + + launcher_environment = os.environ.copy() + # Make `java` visible to build script: + # https://github.com/apache/kafka/blob/2.2.0/bin/kafka-run-class.sh#L226 + new_path = os.path.abspath(java_directory) + os.pathsep + launcher_environment['PATH'] + launcher_environment['PATH'] = new_path + # Both ZK & Kafka use Kafka launcher script. + # By default it sets up JMX options: + # https://github.com/apache/kafka/blob/2.2.0/bin/kafka-run-class.sh#L167 + # But that forces the JVM to load file that is not present due to: + # https://docs.oracle.com/javase/9/management/monitoring-and-management-using-jmx-technology.htm + # Let's make it simple and just disable JMX. + launcher_environment['KAFKA_JMX_OPTS'] = ' ' + + # Setup a temporary directory, which will be used by Kafka & Zookeeper servers. + self.kafka_tmp_dir = tempfile.mkdtemp() + print('Temporary directory used for tests: ' + self.kafka_tmp_dir) + + # This directory will store the configuration files fed to services. + config_dir = self.kafka_tmp_dir + '/config' + os.mkdir(config_dir) + # This directory will store Zookeeper's data (== Kafka server metadata). + zookeeper_store_dir = self.kafka_tmp_dir + '/zookeeper_data' + os.mkdir(zookeeper_store_dir) + # These directories will store Kafka's data (== partitions). + kafka_store_dir1 = self.kafka_tmp_dir + '/kafka_data1' + os.mkdir(kafka_store_dir1) + kafka_store_dir2 = self.kafka_tmp_dir + '/kafka_data2' + os.mkdir(kafka_store_dir2) + + # Find the Kafka server 'bin' directory. + kafka_bin_dir = os.path.join('.', 'external', 'kafka_server_binary', 'bin') + + # Main initialization block: + # - generate random ports, + # - render configuration with these ports, + # - start services and check if they are running okay, + # - if anything is having problems, kill everything and start again. + while True: + + # Generate random ports. 
+ zk_port = ServicesHolder.get_random_listener_port() + kafka_envoy_port = ServicesHolder.get_random_listener_port() + kafka_real_port1 = ServicesHolder.get_random_listener_port() + kafka_real_port2 = ServicesHolder.get_random_listener_port() + envoy_monitoring_port = ServicesHolder.get_random_listener_port() + + # These ports need to be exposed to tests. + self.kafka_envoy_port = kafka_envoy_port + self.kafka_real_port1 = kafka_real_port1 + self.kafka_real_port2 = kafka_real_port2 + self.envoy_monitoring_port = envoy_monitoring_port + + # Render config file for Envoy. + template = RenderingHelper.get_template('envoy_config_yaml.j2') + contents = template.render( + data={ + 'kafka_envoy_port': kafka_envoy_port, + 'kafka_real_port1': kafka_real_port1, + 'kafka_real_port2': kafka_real_port2, + 'envoy_monitoring_port': envoy_monitoring_port + }) + envoy_config_file = os.path.join(config_dir, 'envoy_config.yaml') + with open(envoy_config_file, 'w') as fd: + fd.write(contents) + print('Envoy config file rendered at: ' + envoy_config_file) + + # Render config file for Zookeeper. + template = RenderingHelper.get_template('zookeeper_properties.j2') + contents = template.render(data={'data_dir': zookeeper_store_dir, 'zk_port': zk_port}) + zookeeper_config_file = os.path.join(config_dir, 'zookeeper.properties') + with open(zookeeper_config_file, 'w') as fd: + fd.write(contents) + print('Zookeeper config file rendered at: ' + zookeeper_config_file) + + # Render config file for Kafka cluster 1. 
+ template = RenderingHelper.get_template('kafka_server_properties.j2') + contents = template.render( + data={ + 'kafka_real_port': kafka_real_port1, + 'data_dir': kafka_store_dir1, + 'zk_port': zk_port, + 'kafka_zk_instance': 'instance1' + }) + kafka_config_file1 = os.path.join(config_dir, 'kafka_server1.properties') + with open(kafka_config_file1, 'w') as fd: + fd.write(contents) + print('Kafka config file rendered at: ' + kafka_config_file1) + + # Render config file for Kafka cluster 2. + template = RenderingHelper.get_template('kafka_server_properties.j2') + contents = template.render( + data={ + 'kafka_real_port': kafka_real_port2, + 'data_dir': kafka_store_dir2, + 'zk_port': zk_port, + 'kafka_zk_instance': 'instance2' + }) + kafka_config_file2 = os.path.join(config_dir, 'kafka_server2.properties') + with open(kafka_config_file2, 'w') as fd: + fd.write(contents) + print('Kafka config file rendered at: ' + kafka_config_file2) + + # Start the services now. + try: + + # Start Envoy in the background, pointing to rendered config file. + envoy_binary = ServicesHolder.find_envoy() + # --base-id is added to allow multiple Envoy instances to run at the same time. + envoy_args = [ + os.path.abspath(envoy_binary), '-c', envoy_config_file, '--base-id', + str(random.randint(1, 999999)) + ] + envoy_handle = subprocess.Popen( + envoy_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + self.envoy_worker = ProcessWorker( + envoy_handle, 'Envoy', 'starting main dispatch loop') + self.envoy_worker.await_startup() + + # Start Zookeeper in background, pointing to rendered config file. 
+ zk_binary = os.path.join(kafka_bin_dir, 'zookeeper-server-start.sh') + zk_args = [os.path.abspath(zk_binary), zookeeper_config_file] + zk_handle = subprocess.Popen( + zk_args, + env=launcher_environment, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + self.zk_worker = ProcessWorker(zk_handle, 'Zookeeper', 'binding to port') + self.zk_worker.await_startup() + + self.kafka_workers = [] + + # Start Kafka 1 in background, pointing to rendered config file. + kafka_binary = os.path.join(kafka_bin_dir, 'kafka-server-start.sh') + kafka_args = [os.path.abspath(kafka_binary), os.path.abspath(kafka_config_file1)] + kafka_handle = subprocess.Popen( + kafka_args, + env=launcher_environment, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + kafka_worker = ProcessWorker(kafka_handle, 'Kafka', '[KafkaServer id=0] started') + kafka_worker.await_startup() + self.kafka_workers.append(kafka_worker) + + # Start Kafka 2 in background, pointing to rendered config file. + kafka_binary = os.path.join(kafka_bin_dir, 'kafka-server-start.sh') + kafka_args = [os.path.abspath(kafka_binary), os.path.abspath(kafka_config_file2)] + kafka_handle = subprocess.Popen( + kafka_args, + env=launcher_environment, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE) + kafka_worker = ProcessWorker(kafka_handle, 'Kafka', '[KafkaServer id=0] started') + kafka_worker.await_startup() + self.kafka_workers.append(kafka_worker) + + # All services have started without problems - now we can finally finish. + break + + except Exception as e: + print('Could not start services, will try again', e) + + if self.kafka_workers: + self.kafka_worker.kill() + self.kafka_worker = None + if self.zk_worker: + self.zk_worker.kill() + self.zk_worker = None + if self.envoy_worker: + self.envoy_worker.kill() + self.envoy_worker = None + + @staticmethod + def find_java(): + """ + This method just locates the Java installation in current directory. 
+ We cannot hardcode the name, as the dirname changes as per: + https://github.com/bazelbuild/bazel/blob/master/tools/jdk/BUILD#L491 + """ + + external_dir = os.path.join('.', 'external') + for directory in os.listdir(external_dir): + if 'remotejdk11' in directory: + result = os.path.join(external_dir, directory, 'bin') + print('Using Java: ' + result) + return result + raise Exception('Could not find Java in: ' + external_dir) + + @staticmethod + def find_envoy(): + """ + This method locates envoy binary. + It's present at ./contrib/exe/envoy-static (at least for mac/bazel-asan/bazel-tsan), + or at ./external/envoy/contrib/exe/envoy-static (for bazel-compile_time_options). + """ + + candidate = os.path.join('.', 'contrib', 'exe', 'envoy-static') + if os.path.isfile(candidate): + return candidate + candidate = os.path.join('.', 'external', 'envoy', 'contrib', 'exe', 'envoy-static') + if os.path.isfile(candidate): + return candidate + raise Exception("Could not find Envoy") + + def shut_down(self): + # Teardown - kill Kafka, Zookeeper, and Envoy. Then delete their data directory. + print('Cleaning up') + + if self.kafka_workers: + for worker in self.kafka_workers: + worker.kill() + + if self.zk_worker: + self.zk_worker.kill() + + if self.envoy_worker: + self.envoy_worker.kill() + + if self.kafka_tmp_dir: + print('Removing temporary directory: ' + self.kafka_tmp_dir) + shutil.rmtree(self.kafka_tmp_dir) + + def check_state(self): + self.envoy_worker.check_state() + self.zk_worker.check_state() + for worker in self.kafka_workers: + worker.check_state() + + +class ProcessWorker: + """ + Helper class that wraps the external service process. + Provides ability to wait until service is ready to use (this is done by tracing logs) and + printing service's output to stdout. + """ + + # Service is considered to be properly initialized after it has logged its startup message + # and has been alive for INITIALIZATION_WAIT_SECONDS after that message has been seen. 
+ # This (clunky) design is needed because Zookeeper happens to log "binding to port" and then + # might fail to bind. + INITIALIZATION_WAIT_SECONDS = 3 + + def __init__(self, process_handle, name, startup_message): + # Handle to process and pretty name. + self.process_handle = process_handle + self.name = name + + self.startup_message = startup_message + self.startup_message_ts = None + + # Semaphore raised when startup has finished and information regarding startup's success. + self.initialization_semaphore = Semaphore(value=0) + self.initialization_ok = False + + self.state_worker = Thread(target=ProcessWorker.initialization_worker, args=(self,)) + self.state_worker.start() + self.out_worker = Thread( + target=ProcessWorker.pipe_handler, args=(self, self.process_handle.stdout, 'out')) + self.out_worker.start() + self.err_worker = Thread( + target=ProcessWorker.pipe_handler, args=(self, self.process_handle.stderr, 'err')) + self.err_worker.start() + + @staticmethod + def initialization_worker(owner): + """ + Worker thread. + Responsible for detecting if service died during initialization steps and ensuring if enough + time has passed since the startup message has been seen. + When either of these happens, we just raise the initialization semaphore. + """ + + while True: + status = owner.process_handle.poll() + if status: + # Service died. + print('%s did not initialize properly - finished with: %s' % (owner.name, status)) + owner.initialization_ok = False + owner.initialization_semaphore.release() + break + else: + # Service is still running. + startup_message_ts = owner.startup_message_ts + if startup_message_ts: + # The log message has been registered (by pipe_handler thread), let's just ensure that + # some time has passed and mark the service as running. 
+ current_time = int(round(time.time())) + if current_time - startup_message_ts >= ProcessWorker.INITIALIZATION_WAIT_SECONDS: + print( + 'Startup message seen %s seconds ago, and service is still running' % + (ProcessWorker.INITIALIZATION_WAIT_SECONDS), + flush=True) + owner.initialization_ok = True + owner.initialization_semaphore.release() + break + time.sleep(1) + print('Initialization worker for %s has finished' % (owner.name)) + + @staticmethod + def pipe_handler(owner, pipe, pipe_name): + """ + Worker thread. + If a service startup message is seen, then it just registers the timestamp of its appearance. + Also prints every received message. + """ + + try: + for raw_line in pipe: + line = raw_line.decode().rstrip() + print('%s(%s):' % (owner.name, pipe_name), line, flush=True) + if owner.startup_message in line: + print( + '%s initialization message [%s] has been logged' % + (owner.name, owner.startup_message)) + owner.startup_message_ts = int(round(time.time())) + finally: + pipe.close() + print('Pipe handler for %s(%s) has finished' % (owner.name, pipe_name)) + + def await_startup(self): + """ + Awaits on initialization semaphore, and then verifies the initialization state. + If everything is okay, we just continue (we can use the service), otherwise throw. + """ + + print('Waiting for %s to start...' % (self.name)) + self.initialization_semaphore.acquire() + try: + if self.initialization_ok: + print('Service %s started successfully' % (self.name)) + else: + raise Exception('%s could not start' % (self.name)) + finally: + self.initialization_semaphore.release() + + def check_state(self): + """ + Verifies if the service is still running. Throws if it is not. + """ + + status = self.process_handle.poll() + if status: + raise Exception('%s died with: %s' % (self.name, str(status))) + + def kill(self): + """ + Utility method to kill the main service thread and all related workers. + """ + + print('Stopping service %s' % self.name) + + # Kill the real process. 
+ self.process_handle.kill() + self.process_handle.wait() + + # The sub-workers are going to finish on their own, as they will detect main thread dying + # (through pipes closing, or .poll() returning a non-null value). + self.state_worker.join() + self.out_worker.join() + self.err_worker.join() + + print('Service %s has been stopped' % self.name) + + +class RenderingHelper: + """ + Helper for jinja templates. + """ + + @staticmethod + def get_template(template): + import jinja2 + import os + import sys + # Templates are resolved relatively to main start script, due to main & test templates being + # stored in different directories. + env = jinja2.Environment( + loader=jinja2.FileSystemLoader(searchpath=os.path.dirname(os.path.abspath(__file__)))) + return env.get_template(template) + + +if __name__ == '__main__': + unittest.main() diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/kafka_server_properties.j2 b/contrib/kafka/filters/network/test/mesh/integration_test/kafka_server_properties.j2 new file mode 100644 index 0000000000000..021991a0d4670 --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/integration_test/kafka_server_properties.j2 @@ -0,0 +1,31 @@ +broker.id=0 +listeners=PLAINTEXT://127.0.0.1:{{ data['kafka_real_port'] }} +advertised.listeners=PLAINTEXT://127.0.0.1:{{ data['kafka_real_port'] }} + +num.network.threads=3 +num.io.threads=8 +socket.send.buffer.bytes=102400 +socket.receive.buffer.bytes=102400 +socket.request.max.bytes=104857600 + +log.dirs={{ data['data_dir'] }} +num.partitions=1 +num.recovery.threads.per.data.dir=1 + +offsets.topic.replication.factor=1 +transaction.state.log.replication.factor=1 +transaction.state.log.min.isr=1 + +log.retention.hours=168 +log.segment.bytes=1073741824 +log.retention.check.interval.ms=300000 + +# As we are going to have multiple Kafka clusters (not even brokers!), +# we need to register them at different paths in ZK. 
+zookeeper.connect=127.0.0.1:{{ data['zk_port'] }}/{{ data['kafka_zk_instance'] }} +zookeeper.connection.timeout.ms=6000 + +group.initial.rebalance.delay.ms=0 + +# The number of __consumer_offsets partitions is reduced to make logs a bit more readable. +offsets.topic.num.partitions=5 diff --git a/contrib/kafka/filters/network/test/mesh/integration_test/zookeeper_properties.j2 b/contrib/kafka/filters/network/test/mesh/integration_test/zookeeper_properties.j2 new file mode 100644 index 0000000000000..be524bea342bc --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/integration_test/zookeeper_properties.j2 @@ -0,0 +1,5 @@ +clientPort={{ data['zk_port'] }} +dataDir={{ data['data_dir'] }} +maxClientCnxns=0 +# ZK 3.5 tries to bind 8080 for introspection capability - we do not need that. +admin.enableServer=false diff --git a/contrib/kafka/filters/network/test/mesh/upstream_config_unit_test.cc b/contrib/kafka/filters/network/test/mesh/upstream_config_unit_test.cc new file mode 100644 index 0000000000000..23bfb039b9a03 --- /dev/null +++ b/contrib/kafka/filters/network/test/mesh/upstream_config_unit_test.cc @@ -0,0 +1,143 @@ +#include "source/common/protobuf/utility.h" + +#include "test/test_common/utility.h" + +#include "contrib/kafka/filters/network/source/mesh/upstream_config.h" +#include "gtest/gtest.h" + +namespace Envoy { +namespace Extensions { +namespace NetworkFilters { +namespace Kafka { +namespace Mesh { + +TEST(UpstreamKafkaConfigurationTest, shouldThrowIfNoKafkaClusters) { + // given + KafkaMeshProtoConfig proto_config; + + // when + // then - exception gets thrown + EXPECT_THROW_WITH_REGEX(UpstreamKafkaConfigurationImpl{proto_config}, EnvoyException, + "at least one upstream Kafka cluster"); +} + +TEST(UpstreamKafkaConfigurationTest, shouldThrowIfKafkaClustersWithSameName) { + // given + const std::string yaml = R"EOF( +advertised_host: mock +advertised_port: 1 +upstream_clusters: +- cluster_name: REPEATEDNAME + bootstrap_servers: mock + 
partition_count : 1 +- cluster_name: REPEATEDNAME + bootstrap_servers: mock + partition_count : 1 +forwarding_rules: + )EOF"; + KafkaMeshProtoConfig proto_config; + TestUtility::loadFromYamlAndValidate(yaml, proto_config); + + // when + // then - exception gets thrown + EXPECT_THROW_WITH_REGEX(UpstreamKafkaConfigurationImpl{proto_config}, EnvoyException, + "multiple Kafka clusters referenced by the same name"); +} + +TEST(UpstreamKafkaConfigurationTest, shouldThrowIfNoForwardingRules) { + // given + const std::string yaml = R"EOF( +advertised_host: mock_host +advertised_port: 42 +upstream_clusters: +- cluster_name: mock + bootstrap_servers: mock + partition_count : 1 +forwarding_rules: + )EOF"; + KafkaMeshProtoConfig proto_config; + TestUtility::loadFromYamlAndValidate(yaml, proto_config); + + // when + // then - exception gets thrown + EXPECT_THROW_WITH_REGEX(UpstreamKafkaConfigurationImpl{proto_config}, EnvoyException, + "at least one forwarding rule"); +} + +TEST(UpstreamKafkaConfigurationTest, shouldThrowIfForwardingRuleWithUnknownTarget) { + // given + const std::string yaml = R"EOF( +advertised_host: mock_host +advertised_port: 42 +upstream_clusters: +- cluster_name: mock + bootstrap_servers: mock + partition_count : 1 +forwarding_rules: +- target_cluster: BADNAME + topic_prefix: mock + )EOF"; + KafkaMeshProtoConfig proto_config; + TestUtility::loadFromYamlAndValidate(yaml, proto_config); + + // when + // then - exception gets thrown + EXPECT_THROW_WITH_REGEX(UpstreamKafkaConfigurationImpl{proto_config}, EnvoyException, + "forwarding rule is referencing unknown upstream Kafka cluster"); +} + +TEST(UpstreamKafkaConfigurationTest, shouldBehaveProperly) { + // given + const std::string yaml = R"EOF( +advertised_host: mock_host +advertised_port: 42 +upstream_clusters: +- cluster_name: cluster1 + bootstrap_servers: s1 + partition_count : 1 +- cluster_name: cluster2 + bootstrap_servers: s2 + partition_count : 2 +forwarding_rules: +- target_cluster: cluster1 + 
topic_prefix: prefix1 +- target_cluster: cluster2 + topic_prefix: prefix2 + )EOF"; + KafkaMeshProtoConfig proto_config; + TestUtility::loadFromYamlAndValidate(yaml, proto_config); + const UpstreamKafkaConfiguration& testee = UpstreamKafkaConfigurationImpl{proto_config}; + + const ClusterConfig cluster1 = {"cluster1", 1, {{"bootstrap.servers", "s1"}}}; + const ClusterConfig cluster2 = {"cluster2", 2, {{"bootstrap.servers", "s2"}}}; + + // when, then (advertised address is returned properly) + const auto address = testee.getAdvertisedAddress(); + EXPECT_EQ(address.first, "mock_host"); + EXPECT_EQ(address.second, 42); + + // when, then (matching prefix with something more) + const auto res1 = testee.computeClusterConfigForTopic("prefix1somethingmore"); + ASSERT_TRUE(res1.has_value()); + EXPECT_EQ(*res1, cluster1); + + // when, then (matching prefix alone) + const auto res2 = testee.computeClusterConfigForTopic("prefix1"); + ASSERT_TRUE(res2.has_value()); + EXPECT_EQ(*res2, cluster1); + + // when, then (failing to match first rule, but then matching the second one) + const auto res3 = testee.computeClusterConfigForTopic("prefix2somethingmore"); + ASSERT_TRUE(res3.has_value()); + EXPECT_EQ(*res3, cluster2); + + // when, then (no rules match) + const auto res4 = testee.computeClusterConfigForTopic("someotherthing"); + EXPECT_FALSE(res4.has_value()); +} + +} // namespace Mesh +} // namespace Kafka +} // namespace NetworkFilters +} // namespace Extensions +} // namespace Envoy diff --git a/contrib/squash/filters/http/test/squash_filter_integration_test.cc b/contrib/squash/filters/http/test/squash_filter_integration_test.cc index 83f10a7069c66..938d2e6a59d40 100644 --- a/contrib/squash/filters/http/test/squash_filter_integration_test.cc +++ b/contrib/squash/filters/http/test/squash_filter_integration_test.cc @@ -82,7 +82,7 @@ class SquashFilterIntegrationTest : public testing::TestWithParamadd_clusters(); diff --git 
a/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst b/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst index 4753f3845a78c..2c286686f33f5 100644 --- a/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst +++ b/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst @@ -14,6 +14,10 @@ this filter) are forwarded as-is. * :ref:`v3 API reference ` * This filter should be configured with the name *envoy.filters.network.kafka_broker*. +.. attention:: + + The Kafka broker filter is only included in :ref:`contrib images ` + .. attention:: The kafka_broker filter is experimental and is currently under active development. diff --git a/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst b/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst new file mode 100644 index 0000000000000..4a8504b7d67e3 --- /dev/null +++ b/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst @@ -0,0 +1,103 @@ +.. _config_network_filters_kafka_mesh: + +Kafka Mesh filter +=================== + +The Apache Kafka mesh filter provides a facade for `Apache Kafka `_ +producers. Produce requests sent to this filter instance can be forwarded to one of multiple +clusters, depending on configured forwarding rules. Corresponding message versions from +Kafka 2.4.0 are supported. + +* :ref:`v3 API reference ` +* This filter should be configured with the name *envoy.filters.network.kafka_mesh*. + +.. attention:: + + The Kafka mesh filter is only included in :ref:`contrib images ` + +.. attention:: + + The kafka_mesh filter is experimental and is currently under active development. + Capabilities will be expanded over time and the configuration structures are likely to change. + +.. attention:: + + The kafka_mesh filter does not work on Windows (the blocker is getting librdkafka compiled). + +.. 
_config_network_filters_kafka_mesh_config: + +Configuration +------------- + +Below example shows us typical filter configuration that proxies 3 Kafka clusters. +Clients are going to connect to '127.0.0.1:19092', and their messages are going to be distributed +to cluster depending on topic names. + +.. code-block:: yaml + + listeners: + - address: + socket_address: + address: 127.0.0.1 # Host that Kafka clients should connect to. + port_value: 19092 # Port that Kafka clients should connect to. + filter_chains: + - filters: + - name: envoy.filters.network.kafka_mesh + typed_config: + "@type": type.googleapis.com/envoy.extensions.filters.network.kafka_mesh.v3alpha.KafkaMesh + advertised_host: "127.0.0.1" + advertised_port: 19092 + upstream_clusters: + - cluster_name: kafka_c1 + bootstrap_servers: cluster1_node1:9092,cluster1_node2:9092,cluster1_node3:9092 + partition_count: 1 + - cluster_name: kafka_c2 + bootstrap_servers: cluster2_node1:9092,cluster2_node2:9092,cluster2_node3:9092 + partition_count: 1 + - cluster_name: kafka_c3 + bootstrap_servers: cluster3_node1:9092,cluster3_node2:9092 + partition_count: 5 + producer_config: + acks: "1" + linger.ms: "500" + forwarding_rules: + - target_cluster: kafka_c1 + topic_prefix: apples + - target_cluster: kafka_c2 + topic_prefix: bananas + - target_cluster: kafka_c3 + topic_prefix: cherries + +It should be noted that Kafka broker filter can be inserted before Kafka mesh filter in the filter +chain to capture the request processing metrics. + +.. _config_network_filters_kafka_mesh_notes: + +Notes +----- +Given that this filter does its own processing of received requests, there are some changes +in behaviour compared to explicit connection to a Kafka cluster: + +#. Record headers are not sent upstream. +#. Only ProduceRequests with version 2 are supported (what means very old producers like 0.8 are + not going to be supported). +#. 
Python producers need to set API version of at least 1.0.0, so that the produce requests they + send are going to have records with magic equal to 2. +#. Downstream handling of Kafka producer 'acks' property is delegated to upstream client. + E.g. if upstream client is configured to use acks=0 then the response is going to be sent + to downstream client as soon as possible (even if they had non-zero acks!). +#. As the filter splits single producer requests into separate records, it's possible that delivery + of only some of these records fails. In that case, the response returned to downstream client is + a failure, however it is possible some of the records have been appended in target cluster. +#. Because of the splitting mentioned above, records are not necessarily appended one after another + (as they do not get sent as single request to upstream). Users that want to avoid this scenario + might want to take a look into downstream producer configs: 'linger.ms' and 'batch.size'. +#. Produce requests that reference topics that do not match any of the rules are going to close + connection and fail. This usually should not happen (clients request metadata first, and they + should then fail with 'no broker available' first), but is possible if someone tailors binary + payloads over the connection. +#. librdkafka was compiled without ssl, lz4, gssapi, so related custom producer config options are + not supported. +#. Invalid custom producer configs are not found at startup (only when appropriate clusters are + being sent to). Requests that would have referenced these clusters are going to close connection + and fail. 
diff --git a/docs/root/configuration/listeners/network_filters/network_filters.rst b/docs/root/configuration/listeners/network_filters/network_filters.rst index a4b918ddf380b..d4dcc5e86c97a 100644 --- a/docs/root/configuration/listeners/network_filters/network_filters.rst +++ b/docs/root/configuration/listeners/network_filters/network_filters.rst @@ -17,6 +17,7 @@ filters. direct_response_filter ext_authz_filter kafka_broker_filter + kafka_mesh_filter local_rate_limit_filter mongo_proxy_filter mysql_proxy_filter diff --git a/docs/root/version_history/current.rst b/docs/root/version_history/current.rst index c8e227d34b58d..bdc1e5bb7e920 100644 --- a/docs/root/version_history/current.rst +++ b/docs/root/version_history/current.rst @@ -76,6 +76,7 @@ Bug Fixes * aws request signer: fix the AWS Request Signer extension to correctly normalize the path and query string to be signed according to AWS' guidelines, so that the hash on the server side matches. See `AWS SigV4 documentaion `_. * cluster: delete pools when they're idle to fix unbounded memory use when using PROXY protocol upstream with tcp_proxy. This behavior can be temporarily reverted by setting the ``envoy.reloadable_features.conn_pool_delete_when_idle`` runtime guard to false. * cluster: finish cluster warming even if hosts are removed before health check initialization. This only affected clusters with :ref:`ignore_health_on_host_removal `. +* compressor: fix a bug where if trailers were added and a subsequent filter paused the filter chain, the request could be stalled. This behavior can be reverted by setting ``envoy.reloadable_features.fix_added_trailers`` to false. * dynamic forward proxy: fixing a validation bug where san and sni checks were not applied setting :ref:`http_protocol_options ` via :ref:`typed_extension_protocol_options `. * ext_authz: fix the ext_authz filter to correctly merge multiple same headers using the ',' as separator in the check request to the external authorization service. 
* ext_authz: the network ext_authz filter now correctly sets dynamic metdata returned by the authorization service for non-OK responses. This behavior now matches the http ext_authz filter. diff --git a/envoy/stats/allocator.h b/envoy/stats/allocator.h index 6f9cc9715ea43..2924ebf0ab303 100644 --- a/envoy/stats/allocator.h +++ b/envoy/stats/allocator.h @@ -70,11 +70,12 @@ class Allocator { virtual void markTextReadoutForDeletion(const TextReadoutSharedPtr& text_readout) PURE; /** - * Iterate over all stats that need to be sinked. Note, that implementations can potentially hold - * on to a mutex that will deadlock if the passed in functors try to create or delete a stat. - * @param f_size functor that is provided the number of all sinked stats. Note this is called - * only once, prior to any calls to f_stat. - * @param f_stat functor that is provided one sinked stat at a time. + * Iterate over all stats that need to be added to a sink. Note, that implementations can + * potentially hold on to a mutex that will deadlock if the passed in functors try to create + * or delete a stat. + * @param f_size functor that is provided the number of all stats in the sink. Note this is + * called only once, prior to any calls to f_stat. + * @param f_stat functor that is provided one stat in the sink at a time. */ virtual void forEachCounter(std::function f_size, std::function f_stat) const PURE; diff --git a/envoy/stats/store.h b/envoy/stats/store.h index a682fb0cd3d5f..3d456bbe7bec9 100644 --- a/envoy/stats/store.h +++ b/envoy/stats/store.h @@ -51,10 +51,11 @@ class Store : public Scope { virtual std::vector histograms() const PURE; /** - * Iterate over all stats that need to be sinked. Note, that implementations can potentially hold - * on to a mutex that will deadlock if the passed in functors try to create or delete a stat. - * @param f_size functor that is provided the number of all sinked stats. - * @param f_stat functor that is provided one sinked stat at a time. 
+ * Iterate over all stats that need to be added to a sink. Note, that implementations can + * potentially hold on to a mutex that will deadlock if the passed in functors try to create + * or delete a stat. + * @param f_size functor that is provided the number of all stats in the sink. + * @param f_stat functor that is provided one stat in the sink at a time. */ virtual void forEachCounter(std::function f_size, std::function f_stat) const PURE; diff --git a/generated_api_shadow/BUILD b/generated_api_shadow/BUILD index 5bbde32946b63..93f9184a2b400 100644 --- a/generated_api_shadow/BUILD +++ b/generated_api_shadow/BUILD @@ -60,6 +60,7 @@ proto_library( "//contrib/envoy/extensions/filters/http/squash/v3:pkg", "//contrib/envoy/extensions/filters/http/sxg/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/kafka_broker/v3:pkg", + "//contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/mysql_proxy/v3:pkg", "//contrib/envoy/extensions/filters/network/postgres_proxy/v3alpha:pkg", "//contrib/envoy/extensions/filters/network/rocketmq_proxy/v3:pkg", diff --git a/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD b/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD new file mode 100644 index 0000000000000..ee92fb652582e --- /dev/null +++ b/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/BUILD @@ -0,0 +1,9 @@ +# DO NOT EDIT. This file is generated by tools/proto_format/proto_sync.py. 
+ +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = ["@com_github_cncf_udpa//udpa/annotations:pkg"], +) diff --git a/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto b/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto new file mode 100644 index 0000000000000..03a6522852ab5 --- /dev/null +++ b/generated_api_shadow/contrib/envoy/extensions/filters/network/kafka_mesh/v3alpha/kafka_mesh.proto @@ -0,0 +1,58 @@ +syntax = "proto3"; + +package envoy.extensions.filters.network.kafka_mesh.v3alpha; + +import "udpa/annotations/status.proto"; +import "validate/validate.proto"; + +option java_package = "io.envoyproxy.envoy.extensions.filters.network.kafka_mesh.v3alpha"; +option java_outer_classname = "KafkaMeshProto"; +option java_multiple_files = true; +option (udpa.annotations.file_status).work_in_progress = true; +option (udpa.annotations.file_status).package_version_status = ACTIVE; + +// [#protodoc-title: Kafka Mesh] +// Kafka Mesh :ref:`configuration overview `. +// [#extension: envoy.filters.network.kafka_mesh] + +message KafkaMesh { + // Envoy's host that's advertised to clients. + // Has the same meaning as corresponding Kafka broker properties. + // Usually equal to filter chain's listener config, but needs to be reachable by clients + // (so 0.0.0.0 will not work). + string advertised_host = 1 [(validate.rules).string = {min_len: 1}]; + + // Envoy's port that's advertised to clients. + int32 advertised_port = 2 [(validate.rules).int32 = {gt: 0}]; + + // Upstream clusters this filter will connect to. + repeated KafkaClusterDefinition upstream_clusters = 3; + + // Rules that will decide which cluster gets which request. + repeated ForwardingRule forwarding_rules = 4; +} + +message KafkaClusterDefinition { + // Cluster name. 
+ string cluster_name = 1 [(validate.rules).string = {min_len: 1}]; + + // Kafka cluster address. + string bootstrap_servers = 2 [(validate.rules).string = {min_len: 1}]; + + // Default number of partitions present in this cluster. + // This is especially important for clients that do not specify partition in their payloads and depend on this value for hashing. + int32 partition_count = 3 [(validate.rules).int32 = {gt: 0}]; + + // Custom configuration passed to Kafka producer. + map producer_config = 4; +} + +message ForwardingRule { + // Cluster name. + string target_cluster = 1; + + oneof trigger { + // Intended place for future types of forwarding rules. + string topic_prefix = 2; + } +} diff --git a/source/common/common/stl_helpers.h b/source/common/common/stl_helpers.h index 674372f64eaa1..9c1ab9498b330 100644 --- a/source/common/common/stl_helpers.h +++ b/source/common/common/stl_helpers.h @@ -28,7 +28,8 @@ std::string accumulateToString(const ContainerT& source, if (source.empty()) { return "[]"; } - return std::accumulate(std::next(source.begin()), source.end(), "[" + string_func(source[0]), + return std::accumulate(std::next(source.begin()), source.end(), + "[" + string_func(*source.begin()), [string_func](std::string acc, const T& element) { return acc + ", " + string_func(element); }) + diff --git a/source/common/http/filter_manager.cc b/source/common/http/filter_manager.cc index 3369ed06c06af..fc15f8041f224 100644 --- a/source/common/http/filter_manager.cc +++ b/source/common/http/filter_manager.cc @@ -604,6 +604,9 @@ void FilterManager::decodeData(ActiveStreamDecoderFilter* filter, Buffer::Instan ScopeTrackerScopeState scope(&*this, dispatcher_); filter_manager_callbacks_.resetIdleTimer(); + const bool fix_added_trailers = + Runtime::runtimeFeatureEnabled("envoy.reloadable_features.fix_added_trailers"); + // If a response is complete or a reset has been sent, filters do not care about further body // data. Just drop it. 
if (state_.local_complete_) { @@ -690,6 +693,9 @@ void FilterManager::decodeData(ActiveStreamDecoderFilter* filter, Buffer::Instan if (!trailers_exists_at_start && filter_manager_callbacks_.requestTrailers() && trailers_added_entry == decoder_filters_.end()) { + if (fix_added_trailers) { + end_stream = false; + } trailers_added_entry = entry; } @@ -698,7 +704,11 @@ void FilterManager::decodeData(ActiveStreamDecoderFilter* filter, Buffer::Instan // Stop iteration IFF this is not the last filter. If it is the last filter, continue with // processing since we need to handle the case where a terminal filter wants to buffer, but // a previous filter has added trailers. - return; + if (fix_added_trailers) { + break; + } else { + return; + } } } diff --git a/source/common/quic/udp_gso_batch_writer.h b/source/common/quic/udp_gso_batch_writer.h index 367f248bd7b4e..06dfc06cf4620 100644 --- a/source/common/quic/udp_gso_batch_writer.h +++ b/source/common/quic/udp_gso_batch_writer.h @@ -1,6 +1,6 @@ #pragma once -#if !defined(__linux__) +#if !defined(__linux__) || defined(__ANDROID_API__) #define UDP_GSO_BATCH_WRITER_COMPILETIME_SUPPORT 0 #else #define UDP_GSO_BATCH_WRITER_COMPILETIME_SUPPORT 1 diff --git a/source/common/runtime/runtime_features.cc b/source/common/runtime/runtime_features.cc index cf0aeb7ae4807..50cba15839226 100644 --- a/source/common/runtime/runtime_features.cc +++ b/source/common/runtime/runtime_features.cc @@ -64,6 +64,7 @@ constexpr const char* runtime_features[] = { "envoy.reloadable_features.disable_tls_inspector_injection", "envoy.reloadable_features.dont_add_content_length_for_bodiless_requests", "envoy.reloadable_features.enable_compression_without_content_length_header", + "envoy.reloadable_features.fix_added_trailers", "envoy.reloadable_features.grpc_bridge_stats_disabled", "envoy.reloadable_features.grpc_web_fix_non_proto_encoded_response_handling", "envoy.reloadable_features.grpc_json_transcoder_adhere_to_buffer_limits", diff --git 
a/source/common/upstream/cluster_manager_impl.cc b/source/common/upstream/cluster_manager_impl.cc index cbe3507907f5d..6e1f4e1b73587 100644 --- a/source/common/upstream/cluster_manager_impl.cc +++ b/source/common/upstream/cluster_manager_impl.cc @@ -1200,7 +1200,7 @@ void ClusterManagerImpl::ThreadLocalClusterManagerImpl::drainConnPools( pools->drainConnections(Envoy::ConnectionPool::DrainBehavior::DrainAndDelete); container.do_not_delete_ = false; - if (container.pools_->size() == 0) { + if (container.pools_->empty()) { host_http_conn_pool_map_.erase(old_host); } } @@ -1393,7 +1393,7 @@ void ClusterManagerImpl::ThreadLocalClusterManagerImpl::drainAllConnPoolsWorker( Envoy::ConnectionPool::DrainBehavior::DrainExistingConnections); container->do_not_delete_ = false; - if (container->pools_->size() == 0) { + if (container->pools_->empty()) { host_http_conn_pool_map_.erase(host); } } @@ -1539,7 +1539,7 @@ void ClusterManagerImpl::ThreadLocalClusterManagerImpl::httpConnPoolIsIdle( // Guard deletion of the container with `do_not_delete_` to avoid deletion while // iterating through the container in `container->pools_->startDrain()`. See // comment in `ClusterManagerImpl::ThreadLocalClusterManagerImpl::drainConnPools`. - if (!container->do_not_delete_ && container->pools_->size() == 0) { + if (!container->do_not_delete_ && container->pools_->empty()) { ENVOY_LOG(trace, "Pool container empty for host {}, erasing host entry", host); host_http_conn_pool_map_.erase( host); // NOTE: `container` is erased after this point in the lambda. diff --git a/source/common/upstream/conn_pool_map.h b/source/common/upstream/conn_pool_map.h index b3840c3600cd6..6b9891cc84f64 100644 --- a/source/common/upstream/conn_pool_map.h +++ b/source/common/upstream/conn_pool_map.h @@ -46,6 +46,11 @@ template class ConnPoolMap { */ size_t size() const; + /** + * @return true if the pools are empty. + */ + size_t empty() const; + /** * Destroys all mapped pools. 
*/ diff --git a/source/common/upstream/conn_pool_map_impl.h b/source/common/upstream/conn_pool_map_impl.h index 63db84a047412..6df615ce013cf 100644 --- a/source/common/upstream/conn_pool_map_impl.h +++ b/source/common/upstream/conn_pool_map_impl.h @@ -80,6 +80,11 @@ size_t ConnPoolMap::size() const { return active_pools_.size(); } +template +size_t ConnPoolMap::empty() const { + return active_pools_.empty(); +} + template void ConnPoolMap::clear() { Common::AutoDebugRecursionChecker assert_not_in(recursion_checker_); for (auto& pool_pair : active_pools_) { diff --git a/source/common/upstream/priority_conn_pool_map.h b/source/common/upstream/priority_conn_pool_map.h index d3c3c66bd4714..fc69b19ea6e23 100644 --- a/source/common/upstream/priority_conn_pool_map.h +++ b/source/common/upstream/priority_conn_pool_map.h @@ -38,6 +38,11 @@ template class PriorityConnPoolMap { */ size_t size() const; + /** + * @return true if the pools across all priorities are empty. + */ + bool empty() const; + /** * Destroys all mapped pools. 
*/ diff --git a/source/common/upstream/priority_conn_pool_map_impl.h b/source/common/upstream/priority_conn_pool_map_impl.h index a706938b8e182..855da91d0bfc0 100644 --- a/source/common/upstream/priority_conn_pool_map_impl.h +++ b/source/common/upstream/priority_conn_pool_map_impl.h @@ -40,6 +40,16 @@ size_t PriorityConnPoolMap::size() const { return size; } +template +bool PriorityConnPoolMap::empty() const { + for (const auto& pool_map : conn_pool_maps_) { + if (!pool_map->empty()) { + return false; + } + } + return true; +} + template void PriorityConnPoolMap::clear() { for (auto& pool_map : conn_pool_maps_) { diff --git a/source/extensions/common/dynamic_forward_proxy/dns_cache_impl.cc b/source/extensions/common/dynamic_forward_proxy/dns_cache_impl.cc index 7a208b9f1398d..604004c565cdf 100644 --- a/source/extensions/common/dynamic_forward_proxy/dns_cache_impl.cc +++ b/source/extensions/common/dynamic_forward_proxy/dns_cache_impl.cc @@ -2,6 +2,7 @@ #include "envoy/extensions/common/dynamic_forward_proxy/v3/dns_cache.pb.h" +#include "source/common/common/stl_helpers.h" #include "source/common/config/utility.h" #include "source/common/http/utility.h" #include "source/common/network/resolver_impl.h" @@ -289,7 +290,11 @@ void DnsCacheImpl::finishResolve(const std::string& host, Network::DnsResolver::ResolutionStatus status, std::list&& response, bool from_cache) { ASSERT(main_thread_dispatcher_.isThreadSafe()); - ENVOY_LOG(debug, "main thread resolve complete for host '{}'. {} results", host, response.size()); + ENVOY_LOG_EVENT(debug, "dns_cache_finish_resolve", + "main thread resolve complete for host '{}': {}", host, + accumulateToString(response, [](const auto& dns_response) { + return dns_response.address_->asString(); + })); // Functions like this one that modify primary_hosts_ are only called in the main thread so we // know it is safe to use the PrimaryHostInfo pointers outside of the lock. 
diff --git a/test/common/upstream/priority_conn_pool_map_impl_test.cc b/test/common/upstream/priority_conn_pool_map_impl_test.cc index db48e6afc799d..03856fd544f82 100644 --- a/test/common/upstream/priority_conn_pool_map_impl_test.cc +++ b/test/common/upstream/priority_conn_pool_map_impl_test.cc @@ -108,7 +108,7 @@ TEST_F(PriorityConnPoolMapImplTest, TestClearEmptiesOut) { test_map->getPool(ResourcePriority::Default, 2, getBasicFactory()); test_map->clear(); - EXPECT_EQ(test_map->size(), 0); + EXPECT_TRUE(test_map->empty()); } TEST_F(PriorityConnPoolMapImplTest, TestErase) { @@ -124,7 +124,7 @@ TEST_F(PriorityConnPoolMapImplTest, TestErase) { EXPECT_EQ(2, test_map->size()); EXPECT_TRUE(test_map->erasePool(ResourcePriority::Default, 1)); EXPECT_TRUE(test_map->erasePool(ResourcePriority::High, 1)); - EXPECT_EQ(0, test_map->size()); + EXPECT_TRUE(test_map->empty()); EXPECT_NE(pool_ptr, &test_map->getPool(ResourcePriority::High, 1, getBasicFactory()).value().get()); } diff --git a/test/config/utility.cc b/test/config/utility.cc index 4c39561331fa1..0928efd680b86 100644 --- a/test/config/utility.cc +++ b/test/config/utility.cc @@ -1037,7 +1037,9 @@ void ConfigHelper::addVirtualHost(const envoy::config::route::v3::VirtualHost& v storeHttpConnectionManager(hcm_config); } -void ConfigHelper::addFilter(const std::string& config) { +void ConfigHelper::addFilter(const std::string& config) { prependFilter(config); } + +void ConfigHelper::prependFilter(const std::string& config) { RELEASE_ASSERT(!finalized_, ""); envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager hcm_config; diff --git a/test/config/utility.h b/test/config/utility.h index 06865cc36518a..f421c95ba8c10 100644 --- a/test/config/utility.h +++ b/test/config/utility.h @@ -127,13 +127,13 @@ class ConfigHelper { static std::string httpProxyConfig(bool downstream_use_quic = false); // A basic configuration for L7 proxying with QUIC transport. 
static std::string quicHttpProxyConfig(); - // A string for a basic buffer filter, which can be used with addFilter() + // A string for a basic buffer filter, which can be used with prependFilter() static std::string defaultBufferFilter(); - // A string for a small buffer filter, which can be used with addFilter() + // A string for a small buffer filter, which can be used with prependFilter() static std::string smallBufferFilter(); - // A string for a health check filter which can be used with addFilter() + // A string for a health check filter which can be used with prependFilter() static std::string defaultHealthCheckFilter(); - // A string for a squash filter which can be used with addFilter() + // A string for a squash filter which can be used with prependFilter() static std::string defaultSquashFilter(); // A string for startTls transport socket config. static std::string startTlsConfig(); @@ -213,6 +213,10 @@ class ConfigHelper { void addVirtualHost(const envoy::config::route::v3::VirtualHost& vhost); // Add an HTTP filter prior to existing filters. + void prependFilter(const std::string& filter_yaml); + + // Add an HTTP filter prior to existing filters. + // TODO(rgs1): remove once envoy-filter-example has been updated. void addFilter(const std::string& filter_yaml); // Add a network filter prior to existing filters. 
diff --git a/test/extensions/filters/http/adaptive_concurrency/adaptive_concurrency_filter_integration_test.h b/test/extensions/filters/http/adaptive_concurrency/adaptive_concurrency_filter_integration_test.h index 35a107450298e..022ad5a114bb1 100644 --- a/test/extensions/filters/http/adaptive_concurrency/adaptive_concurrency_filter_integration_test.h +++ b/test/extensions/filters/http/adaptive_concurrency/adaptive_concurrency_filter_integration_test.h @@ -41,7 +41,7 @@ class AdaptiveConcurrencyIntegrationTest void customInit() { setDownstreamProtocol(Http::CodecType::HTTP2); setUpstreamProtocol(Http::CodecType::HTTP2); - config_helper_.addFilter(ADAPTIVE_CONCURRENCY_CONFIG); + config_helper_.prependFilter(ADAPTIVE_CONCURRENCY_CONFIG); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); } diff --git a/test/extensions/filters/http/admission_control/admission_control_integration_test.cc b/test/extensions/filters/http/admission_control/admission_control_integration_test.cc index 8fb07712f747b..e59dba7611f65 100644 --- a/test/extensions/filters/http/admission_control/admission_control_integration_test.cc +++ b/test/extensions/filters/http/admission_control/admission_control_integration_test.cc @@ -46,7 +46,7 @@ class AdmissionControlIntegrationTest : public Event::TestUsingSimulatedTime, void initialize() override { config_helper_.addConfigModifier(setEnableDownstreamTrailersHttp1()); - config_helper_.addFilter(ADMISSION_CONTROL_CONFIG); + config_helper_.prependFilter(ADMISSION_CONTROL_CONFIG); HttpIntegrationTest::initialize(); } diff --git a/test/extensions/filters/http/alternate_protocols_cache/filter_integration_test.cc b/test/extensions/filters/http/alternate_protocols_cache/filter_integration_test.cc index 74660d6a2c6b2..0bfc84ac19fae 100644 --- a/test/extensions/filters/http/alternate_protocols_cache/filter_integration_test.cc +++ b/test/extensions/filters/http/alternate_protocols_cache/filter_integration_test.cc @@ -25,7 +25,7 @@ name: 
alternate_protocols_cache alternate_protocols_cache_options: name: default_alternate_protocols_cache )EOF"; - config_helper_.addFilter(filter); + config_helper_.prependFilter(filter); upstream_tls_ = true; config_helper_.configureUpstreamTls(/*use_alpn=*/true, /*http3=*/true, diff --git a/test/extensions/filters/http/aws_lambda/aws_lambda_filter_integration_test.cc b/test/extensions/filters/http/aws_lambda/aws_lambda_filter_integration_test.cc index db3227d9a2df4..ce86d677eb696 100644 --- a/test/extensions/filters/http/aws_lambda/aws_lambda_filter_integration_test.cc +++ b/test/extensions/filters/http/aws_lambda/aws_lambda_filter_integration_test.cc @@ -37,7 +37,7 @@ class AwsLambdaFilterIntegrationTest : public testing::TestWithParamset_seconds(2000 * 1000); }); - config_helper_.addFilter(ConfigHelper::smallBufferFilter()); + config_helper_.prependFilter(ConfigHelper::smallBufferFilter()); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -142,7 +142,7 @@ ConfigHelper::HttpModifierFunction overrideConfig(const std::string& json_config TEST_P(BufferIntegrationTest, RouteDisabled) { ConfigHelper::HttpModifierFunction mod = overrideConfig(R"EOF({"disabled": true})EOF"); config_helper_.addConfigModifier(mod); - config_helper_.addFilter(ConfigHelper::smallBufferFilter()); + config_helper_.prependFilter(ConfigHelper::smallBufferFilter()); config_helper_.setBufferLimits(1024, 1024); initialize(); @@ -169,7 +169,7 @@ TEST_P(BufferIntegrationTest, RouteOverride) { "max_request_bytes": 5242880 }})EOF"); config_helper_.addConfigModifier(mod); - config_helper_.addFilter(ConfigHelper::smallBufferFilter()); + config_helper_.prependFilter(ConfigHelper::smallBufferFilter()); initialize(); diff --git a/test/extensions/filters/http/cache/cache_filter_integration_test.cc b/test/extensions/filters/http/cache/cache_filter_integration_test.cc index d275df8cc48f3..5f09027b73900 100644 --- a/test/extensions/filters/http/cache/cache_filter_integration_test.cc +++ 
b/test/extensions/filters/http/cache/cache_filter_integration_test.cc @@ -33,7 +33,7 @@ class CacheIntegrationTest : public Event::TestUsingSimulatedTime, } void initializeFilter(const std::string& config) { - config_helper_.addFilter(config); + config_helper_.prependFilter(config); initialize(); codec_client_ = makeHttpConnection(makeClientConnection((lookupPort("http")))); } diff --git a/test/extensions/filters/http/cdn_loop/filter_integration_test.cc b/test/extensions/filters/http/cdn_loop/filter_integration_test.cc index 5e61749d0bf28..1403d84aa5adc 100644 --- a/test/extensions/filters/http/cdn_loop/filter_integration_test.cc +++ b/test/extensions/filters/http/cdn_loop/filter_integration_test.cc @@ -31,7 +31,7 @@ name: envoy.filters.http.cdn_loop class CdnLoopFilterIntegrationTest : public HttpProtocolIntegrationTest {}; TEST_P(CdnLoopFilterIntegrationTest, NoCdnLoopHeader) { - config_helper_.addFilter(MaxDefaultConfig); + config_helper_.prependFilter(MaxDefaultConfig); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -48,7 +48,7 @@ TEST_P(CdnLoopFilterIntegrationTest, NoCdnLoopHeader) { } TEST_P(CdnLoopFilterIntegrationTest, CdnLoopHeaderWithOtherCdns) { - config_helper_.addFilter(MaxDefaultConfig); + config_helper_.prependFilter(MaxDefaultConfig); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -68,7 +68,7 @@ TEST_P(CdnLoopFilterIntegrationTest, CdnLoopHeaderWithOtherCdns) { } TEST_P(CdnLoopFilterIntegrationTest, MultipleCdnLoopHeaders) { - config_helper_.addFilter(MaxDefaultConfig); + config_helper_.prependFilter(MaxDefaultConfig); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -86,7 +86,7 @@ TEST_P(CdnLoopFilterIntegrationTest, MultipleCdnLoopHeaders) { } TEST_P(CdnLoopFilterIntegrationTest, CdnLoop0Allowed1Seen) { - config_helper_.addFilter(MaxDefaultConfig); + config_helper_.prependFilter(MaxDefaultConfig); 
initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -103,7 +103,7 @@ TEST_P(CdnLoopFilterIntegrationTest, CdnLoop0Allowed1Seen) { } TEST_P(CdnLoopFilterIntegrationTest, UnparseableHeader) { - config_helper_.addFilter(MaxDefaultConfig); + config_helper_.prependFilter(MaxDefaultConfig); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -120,7 +120,7 @@ TEST_P(CdnLoopFilterIntegrationTest, UnparseableHeader) { } TEST_P(CdnLoopFilterIntegrationTest, CdnLoop2Allowed1Seen) { - config_helper_.addFilter(MaxOf2Config); + config_helper_.prependFilter(MaxOf2Config); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -140,7 +140,7 @@ TEST_P(CdnLoopFilterIntegrationTest, CdnLoop2Allowed1Seen) { } TEST_P(CdnLoopFilterIntegrationTest, CdnLoop2Allowed2Seen) { - config_helper_.addFilter(MaxOf2Config); + config_helper_.prependFilter(MaxOf2Config); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); @@ -160,7 +160,7 @@ TEST_P(CdnLoopFilterIntegrationTest, CdnLoop2Allowed2Seen) { } TEST_P(CdnLoopFilterIntegrationTest, CdnLoop2Allowed3Seen) { - config_helper_.addFilter(MaxOf2Config); + config_helper_.prependFilter(MaxOf2Config); initialize(); codec_client_ = makeHttpConnection(makeClientConnection(lookupPort("http"))); diff --git a/test/extensions/filters/http/composite/composite_filter_integration_test.cc b/test/extensions/filters/http/composite/composite_filter_integration_test.cc index cd7ffec42e28d..551112736898c 100644 --- a/test/extensions/filters/http/composite/composite_filter_integration_test.cc +++ b/test/extensions/filters/http/composite/composite_filter_integration_test.cc @@ -16,7 +16,7 @@ class CompositeFilterIntegrationTest : public testing::TestWithParam void { hcm.mutable_http2_protocol_options()->set_allow_connect(true); }); } - config_helper_.addFilter(compressorFilterConfig); + 
config_helper_.prependFilter(compressorFilterConfig); HttpProtocolIntegrationTest::initialize(); } @@ -247,7 +247,7 @@ void CompressorProxyingConnectIntegrationTest::initialize() { config_helper_.addConfigModifier( [&](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& hcm) -> void { ConfigHelper::setConnectConfig(hcm, false, false); }); - config_helper_.addFilter(compressorFilterConfig); + config_helper_.prependFilter(compressorFilterConfig); HttpProtocolIntegrationTest::initialize(); } diff --git a/test/extensions/filters/http/cors/cors_filter_integration_test.cc b/test/extensions/filters/http/cors/cors_filter_integration_test.cc index 77a0522b4bed2..00bd075b37a92 100644 --- a/test/extensions/filters/http/cors/cors_filter_integration_test.cc +++ b/test/extensions/filters/http/cors/cors_filter_integration_test.cc @@ -14,7 +14,7 @@ class CorsFilterIntegrationTest : public testing::TestWithParam void { diff --git a/test/extensions/filters/http/csrf/csrf_filter_integration_test.cc b/test/extensions/filters/http/csrf/csrf_filter_integration_test.cc index 3b8055e79b5e5..c2dc6f4f10dc6 100644 --- a/test/extensions/filters/http/csrf/csrf_filter_integration_test.cc +++ b/test/extensions/filters/http/csrf/csrf_filter_integration_test.cc @@ -79,7 +79,7 @@ INSTANTIATE_TEST_SUITE_P(Protocols, CsrfFilterIntegrationTest, HttpProtocolIntegrationTest::protocolTestParamsToString); TEST_P(CsrfFilterIntegrationTest, TestCsrfSuccess) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PUT"}, {":path", "/"}, @@ -93,7 +93,7 @@ TEST_P(CsrfFilterIntegrationTest, TestCsrfSuccess) { } TEST_P(CsrfFilterIntegrationTest, TestCsrfDisabled) { - config_helper_.addFilter(CSRF_DISABLED_CONFIG); + config_helper_.prependFilter(CSRF_DISABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PUT"}, {":path", "/"}, @@ -107,7 
+107,7 @@ TEST_P(CsrfFilterIntegrationTest, TestCsrfDisabled) { } TEST_P(CsrfFilterIntegrationTest, TestNonMutationMethod) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "GET"}, {":path", "/"}, @@ -121,7 +121,7 @@ TEST_P(CsrfFilterIntegrationTest, TestNonMutationMethod) { } TEST_P(CsrfFilterIntegrationTest, TestOriginMismatch) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PUT"}, {":path", "/"}, @@ -135,7 +135,7 @@ TEST_P(CsrfFilterIntegrationTest, TestOriginMismatch) { } TEST_P(CsrfFilterIntegrationTest, TestEnforcesPost) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "POST"}, {":path", "/"}, @@ -149,7 +149,7 @@ TEST_P(CsrfFilterIntegrationTest, TestEnforcesPost) { } TEST_P(CsrfFilterIntegrationTest, TestEnforcesDelete) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "DELETE"}, {":path", "/"}, @@ -163,7 +163,7 @@ TEST_P(CsrfFilterIntegrationTest, TestEnforcesDelete) { } TEST_P(CsrfFilterIntegrationTest, TestEnforcesPatch) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PATCH"}, {":path", "/"}, @@ -177,7 +177,7 @@ TEST_P(CsrfFilterIntegrationTest, TestEnforcesPatch) { } TEST_P(CsrfFilterIntegrationTest, TestRefererFallback) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{":method", "DELETE"}, {":path", "/"}, {":scheme", "http"}, @@ 
-189,7 +189,7 @@ TEST_P(CsrfFilterIntegrationTest, TestRefererFallback) { } TEST_P(CsrfFilterIntegrationTest, TestMissingOrigin) { - config_helper_.addFilter(CSRF_FILTER_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_FILTER_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = { {{":method", "DELETE"}, {":path", "/"}, {":scheme", "http"}, {"host", "test-origin"}}}; const auto& response = sendRequest(headers); @@ -198,7 +198,7 @@ TEST_P(CsrfFilterIntegrationTest, TestMissingOrigin) { } TEST_P(CsrfFilterIntegrationTest, TestShadowOnlyMode) { - config_helper_.addFilter(CSRF_SHADOW_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_SHADOW_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PUT"}, {":path", "/"}, @@ -212,7 +212,7 @@ TEST_P(CsrfFilterIntegrationTest, TestShadowOnlyMode) { } TEST_P(CsrfFilterIntegrationTest, TestFilterAndShadowEnabled) { - config_helper_.addFilter(CSRF_ENABLED_CONFIG); + config_helper_.prependFilter(CSRF_ENABLED_CONFIG); Http::TestRequestHeaderMapImpl headers = {{ {":method", "PUT"}, {":path", "/"}, diff --git a/test/extensions/filters/http/decompressor/BUILD b/test/extensions/filters/http/decompressor/BUILD index 6898a202ba113..6ab72ce93c916 100644 --- a/test/extensions/filters/http/decompressor/BUILD +++ b/test/extensions/filters/http/decompressor/BUILD @@ -40,6 +40,7 @@ envoy_extension_cc_test( "//source/extensions/compression/gzip/decompressor:config", "//source/extensions/filters/http/decompressor:config", "//test/integration:http_integration_lib", + "//test/integration/filters:encoder_decoder_buffer_filter_lib", "//test/mocks/server:factory_context_mocks", "//test/test_common:simulated_time_system_lib", "//test/test_common:utility_lib", diff --git a/test/extensions/filters/http/decompressor/decompressor_filter_integration_test.cc b/test/extensions/filters/http/decompressor/decompressor_filter_integration_test.cc index 30c6f5117ee75..2aa2cf3d9a342 100644 --- 
a/test/extensions/filters/http/decompressor/decompressor_filter_integration_test.cc +++ b/test/extensions/filters/http/decompressor/decompressor_filter_integration_test.cc @@ -30,7 +30,7 @@ class DecompressorIntegrationTest : public testing::TestWithParamstartRequest(Http::TestRequestHeaderMapImpl{{":method", "POST"}, + {":scheme", "http"}, + {":path", "/test/long/url"}, + {"content-encoding", "gzip"}, + {":authority", "host"}}); + + auto request_encoder = &encoder_decoder.first; + auto response = std::move(encoder_decoder.second); + + // Compressed JSON. + constexpr uint8_t buffer[] = {0x1f, 0x8b, 0x08, 0x00, 0x9c, 0xb3, 0x38, 0x61, 0x00, 0x03, 0xab, + 0x56, 0x50, 0xca, 0xad, 0x4c, 0x29, 0xcd, 0xcd, 0xad, 0x54, 0x52, + 0xb0, 0x52, 0x50, 0xca, 0x2a, 0xce, 0xcf, 0x53, 0x52, 0xa8, 0xe5, + 0x02, 0x00, 0xa6, 0x6a, 0x24, 0x99, 0x17, 0x00, 0x00, 0x00}; + Buffer::OwnedImpl data(buffer, 43); + codec_client_->sendData(*request_encoder, data, true); + + waitForNextUpstreamRequest(); + + upstream_request_->encodeHeaders(Http::TestResponseHeaderMapImpl{{":status", "200"}}, false); + upstream_request_->encodeData(10, true); + + ASSERT_TRUE(response->waitForEndStream()); + EXPECT_TRUE(response->complete()); + + Stats::Store& stats = test_server_->server().stats(); + Stats::CounterSharedPtr counter = TestUtility::findCounter( + stats, "http.config_test.decompressor.gzip_default.gzip.request.decompressed"); + ASSERT_NE(nullptr, counter); + EXPECT_EQ(1L, counter->value()); +} + } // namespace Envoy diff --git a/test/extensions/filters/http/dynamic_forward_proxy/proxy_filter_integration_test.cc b/test/extensions/filters/http/dynamic_forward_proxy/proxy_filter_integration_test.cc index 6b6ff51010c10..a621e3c1ad15f 100644 --- a/test/extensions/filters/http/dynamic_forward_proxy/proxy_filter_integration_test.cc +++ b/test/extensions/filters/http/dynamic_forward_proxy/proxy_filter_integration_test.cc @@ -41,7 +41,7 @@ name: dynamic_forward_proxy )EOF", 
Network::Test::ipVersionToDnsFamily(GetParam()), max_hosts, max_pending_requests, filename); - config_helper_.addFilter(filter); + config_helper_.prependFilter(filter); config_helper_.addConfigModifier([this](envoy::config::bootstrap::v3::Bootstrap& bootstrap) { // Switch predefined cluster_0 to CDS filesystem sourcing. diff --git a/test/extensions/filters/http/ext_authz/ext_authz_integration_test.cc b/test/extensions/filters/http/ext_authz/ext_authz_integration_test.cc index 5aa3d51953bfd..50600d0e7bccb 100644 --- a/test/extensions/filters/http/ext_authz/ext_authz_integration_test.cc +++ b/test/extensions/filters/http/ext_authz/ext_authz_integration_test.cc @@ -79,7 +79,7 @@ class ExtAuthzGrpcIntegrationTest : public Grpc::GrpcClientIntegrationParamTest, envoy::config::listener::v3::Filter ext_authz_filter; ext_authz_filter.set_name("envoy.filters.http.ext_authz"); ext_authz_filter.mutable_typed_config()->PackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); }); } @@ -506,7 +506,7 @@ class ExtAuthzHttpIntegrationTest : public HttpIntegrationTest, ext_authz_filter.set_name("envoy.filters.http.ext_authz"); ext_authz_filter.mutable_typed_config()->PackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); }); } @@ -795,7 +795,7 @@ TEST_P(ExtAuthzLocalReplyIntegrationTest, DeniedHeaderTest) { envoy::config::listener::v3::Filter ext_authz_filter; ext_authz_filter.set_name("envoy.filters.http.ext_authz"); ext_authz_filter.mutable_typed_config()->PackFrom(proto_config); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_authz_filter)); }); const std::string 
local_reply_yaml = R"EOF( diff --git a/test/extensions/filters/http/ext_proc/ext_proc_grpc_fuzz.cc b/test/extensions/filters/http/ext_proc/ext_proc_grpc_fuzz.cc index 85f01a408c12f..c03cafa52ada7 100644 --- a/test/extensions/filters/http/ext_proc/ext_proc_grpc_fuzz.cc +++ b/test/extensions/filters/http/ext_proc/ext_proc_grpc_fuzz.cc @@ -101,7 +101,7 @@ class ExtProcIntegrationFuzz : public HttpIntegrationTest, envoy::config::listener::v3::Filter ext_proc_filter; ext_proc_filter.set_name("envoy.filters.http.ext_proc"); ext_proc_filter.mutable_typed_config()->PackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); }); // Make sure that we have control over when buffers will fill up. diff --git a/test/extensions/filters/http/ext_proc/ext_proc_integration_test.cc b/test/extensions/filters/http/ext_proc/ext_proc_integration_test.cc index f84e0342e52c4..7dc977d926406 100644 --- a/test/extensions/filters/http/ext_proc/ext_proc_integration_test.cc +++ b/test/extensions/filters/http/ext_proc/ext_proc_integration_test.cc @@ -86,7 +86,7 @@ class ExtProcIntegrationTest : public HttpIntegrationTest, envoy::config::listener::v3::Filter ext_proc_filter; ext_proc_filter.set_name("envoy.filters.http.ext_proc"); ext_proc_filter.mutable_typed_config()->PackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); }); setUpstreamProtocol(Http::CodecType::HTTP2); setDownstreamProtocol(Http::CodecType::HTTP2); diff --git a/test/extensions/filters/http/ext_proc/streaming_integration_test.cc b/test/extensions/filters/http/ext_proc/streaming_integration_test.cc index 8bbc35672982c..4e47554838335 100644 --- a/test/extensions/filters/http/ext_proc/streaming_integration_test.cc +++ 
b/test/extensions/filters/http/ext_proc/streaming_integration_test.cc @@ -74,7 +74,7 @@ class StreamingIntegrationTest : public HttpIntegrationTest, envoy::config::listener::v3::Filter ext_proc_filter; ext_proc_filter.set_name("envoy.filters.http.ext_proc"); ext_proc_filter.mutable_typed_config()->PackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ext_proc_filter)); }); // Make sure that we have control over when buffers will fill up. diff --git a/test/extensions/filters/http/fault/fault_filter_integration_test.cc b/test/extensions/filters/http/fault/fault_filter_integration_test.cc index 370c662438bee..ed4eaf587af6c 100644 --- a/test/extensions/filters/http/fault/fault_filter_integration_test.cc +++ b/test/extensions/filters/http/fault/fault_filter_integration_test.cc @@ -13,7 +13,7 @@ class FaultIntegrationTest : public Event::TestUsingSimulatedTime, public HttpProtocolIntegrationTest { public: void initializeFilter(const std::string& filter_config) { - config_helper_.addFilter(filter_config); + config_helper_.prependFilter(filter_config); initialize(); } diff --git a/test/extensions/filters/http/grpc_http1_reverse_bridge/reverse_bridge_integration_test.cc b/test/extensions/filters/http/grpc_http1_reverse_bridge/reverse_bridge_integration_test.cc index 12e1b7b836d56..9a05608688b5d 100644 --- a/test/extensions/filters/http/grpc_http1_reverse_bridge/reverse_bridge_integration_test.cc +++ b/test/extensions/filters/http/grpc_http1_reverse_bridge/reverse_bridge_integration_test.cc @@ -42,7 +42,7 @@ name: grpc_http1_reverse_bridge response_size_header: "{}" )EOF", response_size_header ? 
*response_size_header : ""); - config_helper_.addFilter(filter); + config_helper_.prependFilter(filter); auto vhost = config_helper_.createVirtualHost("disabled"); envoy::extensions::filters::http::grpc_http1_reverse_bridge::v3::FilterConfigPerRoute diff --git a/test/extensions/filters/http/grpc_json_transcoder/grpc_json_transcoder_integration_test.cc b/test/extensions/filters/http/grpc_json_transcoder/grpc_json_transcoder_integration_test.cc index 2dd86455f58b7..99baf6b1789b1 100644 --- a/test/extensions/filters/http/grpc_json_transcoder/grpc_json_transcoder_integration_test.cc +++ b/test/extensions/filters/http/grpc_json_transcoder/grpc_json_transcoder_integration_test.cc @@ -40,7 +40,7 @@ class GrpcJsonTranscoderIntegrationTest proto_descriptor : "{}" services : "bookstore.Bookstore" )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); } @@ -509,7 +509,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, UnaryGetError1) { services : "bookstore.Bookstore" ignore_unknown_query_parameters : true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); testTranscoding( @@ -533,7 +533,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, UnaryErrorConvertedToJson) { services: "bookstore.Bookstore" convert_grpc_status: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); testTranscoding( @@ -558,7 +558,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, UnaryErrorInTrailerConvertedToJson) { services: "bookstore.Bookstore" convert_grpc_status: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); 
HttpIntegrationTest::initialize(); testTranscoding( @@ -583,7 +583,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, StreamingErrorConvertedToJson) { services: "bookstore.Bookstore" convert_grpc_status: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); testTranscoding( @@ -977,7 +977,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, RejectUnknownMethod) { request_validation_options: reject_unknown_method: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); @@ -1030,7 +1030,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, RejectUnknownQueryParam) { request_validation_options: reject_unknown_query_parameters: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); @@ -1086,7 +1086,7 @@ TEST_P(GrpcJsonTranscoderIntegrationTest, EnableRequestValidationIgnoreQueryPara reject_unknown_method: true reject_unknown_query_parameters: true )EOF"; - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); HttpIntegrationTest::initialize(); @@ -1258,7 +1258,7 @@ class OverrideConfigGrpcJsonTranscoderIntegrationTest : public GrpcJsonTranscode "@type": type.googleapis.com/envoy.extensions.filters.http.grpc_json_transcoder.v3.GrpcJsonTranscoder "proto_descriptor": "" )EOF"; - config_helper_.addFilter(filter); + config_helper_.prependFilter(filter); } }; INSTANTIATE_TEST_SUITE_P(IpVersions, OverrideConfigGrpcJsonTranscoderIntegrationTest, @@ -1306,7 +1306,7 @@ class BufferLimitsDisabledGrpcJsonTranscoderIntegrationTest proto_descriptor : "{}" services : "bookstore.Bookstore" )EOF"; - 
config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(filter, TestEnvironment::runfilesPath("test/proto/bookstore.descriptor"))); // Disable runtime feature. diff --git a/test/extensions/filters/http/grpc_web/grpc_web_filter_integration_test.cc b/test/extensions/filters/http/grpc_web/grpc_web_filter_integration_test.cc index b7ba884e65965..235168207001c 100644 --- a/test/extensions/filters/http/grpc_web/grpc_web_filter_integration_test.cc +++ b/test/extensions/filters/http/grpc_web/grpc_web_filter_integration_test.cc @@ -27,7 +27,7 @@ class GrpcWebFilterIntegrationTest : public testing::TestWithParam, void SetUp() override { setUpstreamProtocol(Http::CodecType::HTTP2); - config_helper_.addFilter("name: envoy.filters.http.grpc_web"); + config_helper_.prependFilter("name: envoy.filters.http.grpc_web"); } void initialize() override { diff --git a/test/extensions/filters/http/jwt_authn/filter_integration_test.cc b/test/extensions/filters/http/jwt_authn/filter_integration_test.cc index f0ef92af55616..98fea932bc868 100644 --- a/test/extensions/filters/http/jwt_authn/filter_integration_test.cc +++ b/test/extensions/filters/http/jwt_authn/filter_integration_test.cc @@ -107,7 +107,7 @@ INSTANTIATE_TEST_SUITE_P(Protocols, LocalJwksIntegrationTest, // With local Jwks, this test verifies a request is passed with a good Jwt token. TEST_P(LocalJwksIntegrationTest, WithGoodToken) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -135,7 +135,7 @@ TEST_P(LocalJwksIntegrationTest, WithGoodToken) { // With local Jwks, this test verifies a request is rejected with an expired Jwt token. 
TEST_P(LocalJwksIntegrationTest, ExpiredToken) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -158,7 +158,7 @@ TEST_P(LocalJwksIntegrationTest, ExpiredToken) { } TEST_P(LocalJwksIntegrationTest, MissingToken) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -179,7 +179,7 @@ TEST_P(LocalJwksIntegrationTest, MissingToken) { } TEST_P(LocalJwksIntegrationTest, ExpiredTokenHeadReply) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -205,7 +205,7 @@ TEST_P(LocalJwksIntegrationTest, ExpiredTokenHeadReply) { // This test verifies a request is passed with a path that don't match any requirements. TEST_P(LocalJwksIntegrationTest, NoRequiresPath) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -227,7 +227,7 @@ TEST_P(LocalJwksIntegrationTest, NoRequiresPath) { // This test verifies a CORS preflight request without JWT token is allowed. 
TEST_P(LocalJwksIntegrationTest, CorsPreflight) { - config_helper_.addFilter(getFilterConfig(true)); + config_helper_.prependFilter(getFilterConfig(true)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -264,8 +264,8 @@ TEST_P(LocalJwksIntegrationTest, FilterStateRequirement) { provider_name: example_provider )"; - config_helper_.addFilter(getAuthFilterConfig(auth_filter_conf, true)); - config_helper_.addFilter(absl::StrCat("name: ", HeaderToFilterStateFilterName)); + config_helper_.prependFilter(getAuthFilterConfig(auth_filter_conf, true)); + config_helper_.prependFilter(absl::StrCat("name: ", HeaderToFilterStateFilterName)); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -336,7 +336,7 @@ class RemoteJwksIntegrationTest : public HttpProtocolIntegrationTest { } void initializeFilter(bool add_cluster) { - config_helper_.addFilter(getFilterConfig(false)); + config_helper_.prependFilter(getFilterConfig(false)); if (add_cluster) { config_helper_.addConfigModifier([](envoy::config::bootstrap::v3::Bootstrap& bootstrap) { @@ -352,7 +352,7 @@ class RemoteJwksIntegrationTest : public HttpProtocolIntegrationTest { } void initializeAsyncFetchFilter(bool fast_listener) { - config_helper_.addFilter(getAsyncFetchFilterConfig(ExampleConfig, fast_listener)); + config_helper_.prependFilter(getAsyncFetchFilterConfig(ExampleConfig, fast_listener)); config_helper_.addConfigModifier([](envoy::config::bootstrap::v3::Bootstrap& bootstrap) { auto* jwks_cluster = bootstrap.mutable_static_resources()->add_clusters(); @@ -605,7 +605,7 @@ TEST_P(RemoteJwksIntegrationTest, WithFailedJwksAsyncFetchFast) { class PerRouteIntegrationTest : public HttpProtocolIntegrationTest { public: void setup(const std::string& filter_config, const PerRouteConfig& per_route) { - config_helper_.addFilter(getAuthFilterConfig(filter_config, true)); + config_helper_.prependFilter(getAuthFilterConfig(filter_config, true)); config_helper_.addConfigModifier( 
[per_route]( diff --git a/test/extensions/filters/http/kill_request/crash_integration_test.cc b/test/extensions/filters/http/kill_request/crash_integration_test.cc index ec68ab95e298e..6eda4c89effda 100644 --- a/test/extensions/filters/http/kill_request/crash_integration_test.cc +++ b/test/extensions/filters/http/kill_request/crash_integration_test.cc @@ -24,7 +24,7 @@ class CrashIntegrationTest : public Event::TestUsingSimulatedTime, public HttpProtocolIntegrationTest { protected: void initializeFilter(const std::string& filter_config) { - config_helper_.addFilter(filter_config); + config_helper_.prependFilter(filter_config); initialize(); } }; @@ -102,7 +102,7 @@ TEST_P(CrashIntegrationTestAllProtocols, DecodeContinueDoesNotAddTrackedObjectIf probability: numerator: 100 )EOF"; - config_helper_.addFilter(request_kill_config); + config_helper_.prependFilter(request_kill_config); // This will stop iteration, and continue via a callback. const std::string stop_and_continue_config = R"EOF( @@ -111,7 +111,7 @@ TEST_P(CrashIntegrationTestAllProtocols, DecodeContinueDoesNotAddTrackedObjectIf "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig installScopeTrackedObject: true )EOF"; - config_helper_.addFilter(stop_and_continue_config); + config_helper_.prependFilter(stop_and_continue_config); initialize(); @@ -137,7 +137,7 @@ TEST_P(CrashIntegrationTestAllProtocols, DecodeContinueAddsCrashContextIfNoneExi probability: numerator: 100 )EOF"; - config_helper_.addFilter(request_kill_config); + config_helper_.prependFilter(request_kill_config); // This will stop iteration, and continue via a callback. 
const std::string stop_and_continue_config = R"EOF( @@ -146,7 +146,7 @@ TEST_P(CrashIntegrationTestAllProtocols, DecodeContinueAddsCrashContextIfNoneExi "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig installScopeTrackedObject: false )EOF"; - config_helper_.addFilter(stop_and_continue_config); + config_helper_.prependFilter(stop_and_continue_config); initialize(); @@ -172,7 +172,7 @@ TEST_P(CrashIntegrationTestAllProtocols, EncodeContinueDoesNotAddTrackedObjectIf "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig installScopeTrackedObject: true )EOF"; - config_helper_.addFilter(stop_and_continue_config); + config_helper_.prependFilter(stop_and_continue_config); const std::string request_kill_config = R"EOF( @@ -183,7 +183,7 @@ TEST_P(CrashIntegrationTestAllProtocols, EncodeContinueDoesNotAddTrackedObjectIf numerator: 100 direction: RESPONSE )EOF"; - config_helper_.addFilter(request_kill_config); + config_helper_.prependFilter(request_kill_config); initialize(); @@ -205,7 +205,7 @@ TEST_P(CrashIntegrationTestAllProtocols, EncodeContinueAddsCrashContextIfNoneExi "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig installScopeTrackedObject: false )EOF"; - config_helper_.addFilter(stop_and_continue_config); + config_helper_.prependFilter(stop_and_continue_config); const std::string request_kill_config = R"EOF( @@ -216,7 +216,7 @@ TEST_P(CrashIntegrationTestAllProtocols, EncodeContinueAddsCrashContextIfNoneExi numerator: 100 direction: RESPONSE )EOF"; - config_helper_.addFilter(request_kill_config); + config_helper_.prependFilter(request_kill_config); initialize(); diff --git a/test/extensions/filters/http/kill_request/kill_request_filter_integration_test.cc b/test/extensions/filters/http/kill_request/kill_request_filter_integration_test.cc index d995c10aad043..6d9a173b99658 100644 --- a/test/extensions/filters/http/kill_request/kill_request_filter_integration_test.cc +++ 
b/test/extensions/filters/http/kill_request/kill_request_filter_integration_test.cc @@ -12,7 +12,7 @@ class KillRequestFilterIntegrationTest : public Event::TestUsingSimulatedTime, public HttpProtocolIntegrationTest { protected: void initializeFilter(const std::string& filter_config) { - config_helper_.addFilter(filter_config); + config_helper_.prependFilter(filter_config); initialize(); } diff --git a/test/extensions/filters/http/local_ratelimit/local_ratelimit_integration_test.cc b/test/extensions/filters/http/local_ratelimit/local_ratelimit_integration_test.cc index 06a8b0ca1b3f3..784dc656f3cb9 100644 --- a/test/extensions/filters/http/local_ratelimit/local_ratelimit_integration_test.cc +++ b/test/extensions/filters/http/local_ratelimit/local_ratelimit_integration_test.cc @@ -9,7 +9,7 @@ class LocalRateLimitFilterIntegrationTest : public Event::TestUsingSimulatedTime public HttpProtocolIntegrationTest { protected: void initializeFilter(const std::string& filter_config) { - config_helper_.addFilter(filter_config); + config_helper_.prependFilter(filter_config); initialize(); } diff --git a/test/extensions/filters/http/lua/lua_integration_test.cc b/test/extensions/filters/http/lua/lua_integration_test.cc index a1ec02924953c..35bd0a9267670 100644 --- a/test/extensions/filters/http/lua/lua_integration_test.cc +++ b/test/extensions/filters/http/lua/lua_integration_test.cc @@ -26,7 +26,7 @@ class LuaIntegrationTest : public testing::TestWithParamPackFrom(proto_config_); - config_helper_.addFilter(MessageUtil::getJsonStringFromMessageOrDie(ratelimit_filter)); + config_helper_.prependFilter(MessageUtil::getJsonStringFromMessageOrDie(ratelimit_filter)); }); config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& diff --git a/test/extensions/filters/http/rbac/rbac_filter_integration_test.cc b/test/extensions/filters/http/rbac/rbac_filter_integration_test.cc index 90bbeb81006d4..242ad7684071d 100644 --- 
a/test/extensions/filters/http/rbac/rbac_filter_integration_test.cc +++ b/test/extensions/filters/http/rbac/rbac_filter_integration_test.cc @@ -152,7 +152,7 @@ INSTANTIATE_TEST_SUITE_P(Protocols, RBACIntegrationTest, TEST_P(RBACIntegrationTest, Allowed) { useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter(RBAC_CONFIG); + config_helper_.prependFilter(RBAC_CONFIG); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -177,7 +177,7 @@ TEST_P(RBACIntegrationTest, Allowed) { TEST_P(RBACIntegrationTest, Denied) { useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter(RBAC_CONFIG); + config_helper_.prependFilter(RBAC_CONFIG); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -200,7 +200,7 @@ TEST_P(RBACIntegrationTest, Denied) { TEST_P(RBACIntegrationTest, DeniedWithDenyAction) { useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter(RBAC_CONFIG_WITH_DENY_ACTION); + config_helper_.prependFilter(RBAC_CONFIG_WITH_DENY_ACTION); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -226,7 +226,7 @@ TEST_P(RBACIntegrationTest, DeniedWithPrefixRule) { config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& cfg) { cfg.mutable_normalize_path()->set_value(false); }); - config_helper_.addFilter(RBAC_CONFIG_WITH_PREFIX_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_WITH_PREFIX_MATCH); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -252,7 +252,7 @@ TEST_P(RBACIntegrationTest, RbacPrefixRuleUseNormalizePath) { config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& cfg) { cfg.mutable_normalize_path()->set_value(true); }); - config_helper_.addFilter(RBAC_CONFIG_WITH_PREFIX_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_WITH_PREFIX_MATCH); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ 
-273,7 +273,7 @@ TEST_P(RBACIntegrationTest, RbacPrefixRuleUseNormalizePath) { } TEST_P(RBACIntegrationTest, DeniedHeadReply) { - config_helper_.addFilter(RBAC_CONFIG); + config_helper_.prependFilter(RBAC_CONFIG); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -309,7 +309,7 @@ TEST_P(RBACIntegrationTest, RouteOverride) { (*config)["envoy.filters.http.rbac"].PackFrom(per_route_config); }); - config_helper_.addFilter(RBAC_CONFIG); + config_helper_.prependFilter(RBAC_CONFIG); initialize(); @@ -333,7 +333,7 @@ TEST_P(RBACIntegrationTest, RouteOverride) { } TEST_P(RBACIntegrationTest, PathWithQueryAndFragmentWithOverride) { - config_helper_.addFilter(RBAC_CONFIG_WITH_PATH_EXACT_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_WITH_PATH_EXACT_MATCH); config_helper_.addRuntimeOverride("envoy.reloadable_features.http_reject_path_with_fragment", "false"); initialize(); @@ -362,7 +362,7 @@ TEST_P(RBACIntegrationTest, PathWithQueryAndFragmentWithOverride) { } TEST_P(RBACIntegrationTest, PathWithFragmentRejectedByDefault) { - config_helper_.addFilter(RBAC_CONFIG_WITH_PATH_EXACT_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_WITH_PATH_EXACT_MATCH); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -385,7 +385,7 @@ TEST_P(RBACIntegrationTest, PathWithFragmentRejectedByDefault) { // This test ensures that the exact match deny rule is not affected by fragment and query // when Envoy is configured to strip both fragment and query. 
TEST_P(RBACIntegrationTest, DenyExactMatchIgnoresQueryAndFragment) { - config_helper_.addFilter(RBAC_CONFIG_DENY_WITH_PATH_EXACT_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_DENY_WITH_PATH_EXACT_MATCH); config_helper_.addRuntimeOverride("envoy.reloadable_features.http_reject_path_with_fragment", "false"); initialize(); @@ -418,7 +418,7 @@ TEST_P(RBACIntegrationTest, DenyExactMatchIgnoresQueryAndFragment) { } TEST_P(RBACIntegrationTest, PathIgnoreCase) { - config_helper_.addFilter(RBAC_CONFIG_WITH_PATH_IGNORE_CASE_MATCH); + config_helper_.prependFilter(RBAC_CONFIG_WITH_PATH_IGNORE_CASE_MATCH); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -445,7 +445,7 @@ TEST_P(RBACIntegrationTest, PathIgnoreCase) { } TEST_P(RBACIntegrationTest, LogConnectionAllow) { - config_helper_.addFilter(RBAC_CONFIG_WITH_LOG_ACTION); + config_helper_.prependFilter(RBAC_CONFIG_WITH_LOG_ACTION); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -469,7 +469,7 @@ TEST_P(RBACIntegrationTest, LogConnectionAllow) { // Basic CEL match on a header value. TEST_P(RBACIntegrationTest, HeaderMatchCondition) { - config_helper_.addFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy")); + config_helper_.prependFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy")); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -494,7 +494,7 @@ TEST_P(RBACIntegrationTest, HeaderMatchCondition) { // CEL match on a header value in which the header is a duplicate. Verifies we handle string // copying correctly inside the CEL expression. 
TEST_P(RBACIntegrationTest, HeaderMatchConditionDuplicateHeaderNoMatch) { - config_helper_.addFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy")); + config_helper_.prependFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy")); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -517,7 +517,7 @@ TEST_P(RBACIntegrationTest, HeaderMatchConditionDuplicateHeaderNoMatch) { // CEL match on a header value in which the header is a duplicate. Verifies we handle string // copying correctly inside the CEL expression. TEST_P(RBACIntegrationTest, HeaderMatchConditionDuplicateHeaderMatch) { - config_helper_.addFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy,zzz")); + config_helper_.prependFilter(fmt::format(RBAC_CONFIG_HEADER_MATCH_CONDITION, "yyy,zzz")); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); diff --git a/test/extensions/filters/http/tap/tap_filter_integration_test.cc b/test/extensions/filters/http/tap/tap_filter_integration_test.cc index 403c5ca023847..cfd7f764453f5 100644 --- a/test/extensions/filters/http/tap/tap_filter_integration_test.cc +++ b/test/extensions/filters/http/tap/tap_filter_integration_test.cc @@ -26,7 +26,7 @@ class TapIntegrationTest : public testing::TestWithParamwaitForCounterGe("listener_manager.listener_create_success", 2); diff --git a/test/extensions/filters/network/thrift_proxy/requirements.in b/test/extensions/filters/network/thrift_proxy/requirements.in new file mode 100644 index 0000000000000..0e405bcd08bc1 --- /dev/null +++ b/test/extensions/filters/network/thrift_proxy/requirements.in @@ -0,0 +1,2 @@ +six +thrift diff --git a/test/extensions/http/header_formatters/preserve_case/preserve_case_formatter_integration_test.cc b/test/extensions/http/header_formatters/preserve_case/preserve_case_formatter_integration_test.cc index 92c72f43bb660..c4a82dea9eadc 100644 --- a/test/extensions/http/header_formatters/preserve_case/preserve_case_formatter_integration_test.cc +++ 
b/test/extensions/http/header_formatters/preserve_case/preserve_case_formatter_integration_test.cc @@ -70,7 +70,7 @@ INSTANTIATE_TEST_SUITE_P(IpVersions, PreserveCaseIntegrationTest, // Verify that we preserve case in both directions. TEST_P(PreserveCaseIntegrationTest, EndToEnd) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: preserve-case-filter )EOF"); initialize(); diff --git a/test/extensions/transport_sockets/alts/alts_integration_test.cc b/test/extensions/transport_sockets/alts/alts_integration_test.cc index bb572db3cde8d..62a960272a8e7 100644 --- a/test/extensions/transport_sockets/alts/alts_integration_test.cc +++ b/test/extensions/transport_sockets/alts/alts_integration_test.cc @@ -115,7 +115,7 @@ class AltsIntegrationTestBase : public Event::TestUsingSimulatedTime, transport_socket->mutable_typed_config()->PackFrom(alts_config); }); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: decode-dynamic-metadata-filter typed_config: "@type": type.googleapis.com/google.protobuf.Empty diff --git a/test/integration/README.md b/test/integration/README.md index e36b9d4d031eb..99badc23e76b3 100644 --- a/test/integration/README.md +++ b/test/integration/README.md @@ -57,7 +57,7 @@ or ```c++ // Add a buffering filter on the request path -config_helper_.addFilter(ConfigHelper::DEFAULT_BUFFER_FILTER); +config_helper_.prependFilter(ConfigHelper::DEFAULT_BUFFER_FILTER); ``` For other edits which are less likely reusable, one can add config modifiers. Config modifiers diff --git a/test/integration/drain_close_integration_test.cc b/test/integration/drain_close_integration_test.cc index de57d7fcba64c..65b84f28bf0e9 100644 --- a/test/integration/drain_close_integration_test.cc +++ b/test/integration/drain_close_integration_test.cc @@ -11,7 +11,7 @@ TEST_P(DrainCloseIntegrationTest, DrainCloseGradual) { // the probability will be very low, but the rapid retries prevent this from // increasing total test time. 
drain_time_ = std::chrono::seconds(100); - config_helper_.addFilter(ConfigHelper::defaultHealthCheckFilter()); + config_helper_.prependFilter(ConfigHelper::defaultHealthCheckFilter()); initialize(); absl::Notification drain_sequence_started; @@ -45,7 +45,7 @@ TEST_P(DrainCloseIntegrationTest, DrainCloseGradual) { TEST_P(DrainCloseIntegrationTest, DrainCloseImmediate) { drain_strategy_ = Server::DrainStrategy::Immediate; drain_time_ = std::chrono::seconds(100); - config_helper_.addFilter(ConfigHelper::defaultHealthCheckFilter()); + config_helper_.prependFilter(ConfigHelper::defaultHealthCheckFilter()); initialize(); absl::Notification drain_sequence_started; diff --git a/test/integration/eds_integration_test.cc b/test/integration/eds_integration_test.cc index 2f2b47d69302d..bfe7ec4c24a06 100644 --- a/test/integration/eds_integration_test.cc +++ b/test/integration/eds_integration_test.cc @@ -408,7 +408,7 @@ TEST_P(EdsIntegrationTest, BatchMemberUpdateCb) { } TEST_P(EdsIntegrationTest, StatsReadyFilter) { - config_helper_.addFilter("name: eds-ready-filter"); + config_helper_.prependFilter("name: eds-ready-filter"); initializeTest(false); // Initial state: no healthy endpoints diff --git a/test/integration/http2_flood_integration_test.cc b/test/integration/http2_flood_integration_test.cc index 64db06468019e..a5c9d864fb7d2 100644 --- a/test/integration/http2_flood_integration_test.cc +++ b/test/integration/http2_flood_integration_test.cc @@ -577,7 +577,7 @@ TEST_P(Http2FloodMitigationTest, Trailers) { // Verify flood detection by the WINDOW_UPDATE frame when a decoder filter is resuming reading from // the downstream via DecoderFilterBelowWriteBufferLowWatermark. 
TEST_P(Http2FloodMitigationTest, WindowUpdateOnLowWatermarkFlood) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: backpressure-filter )EOF"); config_helper_.setBufferLimits(1024 * 1024 * 1024, 1024 * 1024 * 1024); diff --git a/test/integration/http_integration.cc b/test/integration/http_integration.cc index 3cb411a0c730a..4403d39b23171 100644 --- a/test/integration/http_integration.cc +++ b/test/integration/http_integration.cc @@ -1000,7 +1000,7 @@ void HttpIntegrationTest::testEnvoyProxying1xx(bool continue_before_upstream_com bool with_multiple_1xx_headers) { if (with_encoder_filter) { // Add a filter to make sure 100s play well with them. - config_helper_.addFilter("name: passthrough-filter"); + config_helper_.prependFilter("name: passthrough-filter"); } config_helper_.addConfigModifier( [&](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& @@ -1071,7 +1071,7 @@ void HttpIntegrationTest::testTwoRequests(bool network_backup) { // created while the socket appears to be in the high watermark state, and regression tests that // flow control will be corrected as the socket "becomes unblocked" if (network_backup) { - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(R"EOF( name: pause-filter{} typed_config: diff --git a/test/integration/idle_timeout_integration_test.cc b/test/integration/idle_timeout_integration_test.cc index 9a8e0f75e1972..46cbe957e6ae5 100644 --- a/test/integration/idle_timeout_integration_test.cc +++ b/test/integration/idle_timeout_integration_test.cc @@ -194,7 +194,7 @@ TEST_P(IdleTimeoutIntegrationTest, PerStreamIdleTimeoutAfterDownstreamHeaders) { // Per-stream idle timeout with reads disabled. 
TEST_P(IdleTimeoutIntegrationTest, PerStreamIdleTimeoutWithLargeBuffer) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: backpressure-filter )EOF"); enable_per_stream_idle_timeout_ = true; @@ -413,7 +413,7 @@ TEST_P(IdleTimeoutIntegrationTest, RequestTimeoutIsNotDisarmedByEncode100Continu // Per-stream idle timeout reset from within a filter. TEST_P(IdleTimeoutIntegrationTest, PerStreamIdleTimeoutResetFromFilter) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: reset-idle-timer-filter )EOF"); enable_per_stream_idle_timeout_ = true; diff --git a/test/integration/integration_admin_test.cc b/test/integration/integration_admin_test.cc index 578017e301112..6fdc3bd7467cd 100644 --- a/test/integration/integration_admin_test.cc +++ b/test/integration/integration_admin_test.cc @@ -69,7 +69,7 @@ TEST_P(IntegrationAdminTest, HealthCheckWithoutServerStats) { } TEST_P(IntegrationAdminTest, HealthCheckWithBufferFilter) { - config_helper_.addFilter(ConfigHelper::defaultBufferFilter()); + config_helper_.prependFilter(ConfigHelper::defaultBufferFilter()); initialize(); BufferingStreamDecoderPtr response; diff --git a/test/integration/integration_admin_test.h b/test/integration/integration_admin_test.h index 194c54d02ae00..b190cef6edbaf 100644 --- a/test/integration/integration_admin_test.h +++ b/test/integration/integration_admin_test.h @@ -15,7 +15,7 @@ namespace Envoy { class IntegrationAdminTest : public HttpProtocolIntegrationTest { public: void initialize() override { - config_helper_.addFilter(ConfigHelper::defaultHealthCheckFilter()); + config_helper_.prependFilter(ConfigHelper::defaultHealthCheckFilter()); config_helper_.addConfigModifier( [](envoy::config::bootstrap::v3::Bootstrap& bootstrap) -> void { auto& hist_settings = diff --git a/test/integration/integration_test.cc b/test/integration/integration_test.cc index 450e787dd71e1..794a49adda065 100644 --- a/test/integration/integration_test.cc +++ 
b/test/integration/integration_test.cc @@ -303,7 +303,7 @@ TEST_P(IntegrationTest, RouterDirectResponseEmptyBody) { } TEST_P(IntegrationTest, ConnectionClose) { - config_helper_.addFilter(ConfigHelper::defaultHealthCheckFilter()); + config_helper_.prependFilter(ConfigHelper::defaultHealthCheckFilter()); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -419,7 +419,7 @@ TEST_P(IntegrationTest, EnvoyProxyingLate100ContinueWithEncoderFilter) { // Regression test for https://github.com/envoyproxy/envoy/issues/10923. TEST_P(IntegrationTest, EnvoyProxying100ContinueWithDecodeDataPause) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: stop-iteration-and-continue-filter typed_config: "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig @@ -433,7 +433,7 @@ TEST_P(IntegrationTest, MatchingHttpFilterConstruction) { concurrency_ = 2; config_helper_.addRuntimeOverride("envoy.reloadable_features.experimental_matching_api", "true"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: matcher typed_config: "@type": type.googleapis.com/envoy.extensions.common.matching.v3.ExtensionWithMatcher @@ -500,7 +500,7 @@ TEST_P(IntegrationTest, MatchingHttpFilterConstructionNewProto) { concurrency_ = 2; config_helper_.addRuntimeOverride("envoy.reloadable_features.experimental_matching_api", "true"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: matcher typed_config: "@type": type.googleapis.com/envoy.extensions.common.matching.v3.ExtensionWithMatcher @@ -625,7 +625,7 @@ TEST_P(IntegrationTest, UpstreamDisconnectWithTwoRequests) { // Test hitting the bridge filter with too many response bytes to buffer. Given // the headers are not proxied, the connection manager will send a local error reply. 
TEST_P(IntegrationTest, HittingGrpcFilterLimitBufferingHeaders) { - config_helper_.addFilter( + config_helper_.prependFilter( "{ name: grpc_http1_bridge, typed_config: { \"@type\": " "type.googleapis.com/envoy.extensions.filters.http.grpc_http1_bridge.v3.Config } }"); config_helper_.setBufferLimits(1024, 1024); @@ -1480,8 +1480,8 @@ TEST_P(IntegrationTest, TestDelayedConnectionTeardownOnGracefulClose) { hcm) { hcm.mutable_delayed_close_timeout()->set_seconds(1); }); // This test will trigger an early 413 Payload Too Large response due to buffer limits being // exceeded. The following filter is needed since the router filter will never trigger a 413. - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); initialize(); @@ -1513,8 +1513,8 @@ TEST_P(IntegrationTest, TestDelayedConnectionTeardownOnGracefulClose) { // Test configuration of the delayed close timeout on downstream HTTP/1.1 connections. A value of 0 // disables delayed close processing. TEST_P(IntegrationTest, TestDelayedConnectionTeardownConfig) { - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& @@ -1547,8 +1547,8 @@ TEST_P(IntegrationTest, TestDelayedConnectionTeardownConfig) { // Test that if the route cache is cleared, it doesn't cause problems. 
TEST_P(IntegrationTest, TestClearingRouteCacheFilter) { - config_helper_.addFilter("{ name: clear-route-cache, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: clear-route-cache, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); sendRequestAndWaitForResponse(default_request_headers_, 0, default_response_headers_, 0); @@ -1585,8 +1585,8 @@ TEST_P(IntegrationTest, NoConnectionPoolsFree) { } TEST_P(IntegrationTest, ProcessObjectHealthy) { - config_helper_.addFilter("{ name: process-context-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: process-context-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); ProcessObjectForFilter healthy_object(true); process_object_ = healthy_object; @@ -1606,8 +1606,8 @@ TEST_P(IntegrationTest, ProcessObjectHealthy) { } TEST_P(IntegrationTest, ProcessObjectUnealthy) { - config_helper_.addFilter("{ name: process-context-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: process-context-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); ProcessObjectForFilter unhealthy_object(false); process_object_ = unhealthy_object; @@ -2103,7 +2103,7 @@ TEST_P(IntegrationTest, RandomPreconnect) { TEST_P(IntegrationTest, SetRouteToDelegatingRouteWithClusterOverride) { useAccessLog("%UPSTREAM_CLUSTER%\n"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: set-route-filter )EOF"); diff --git a/test/integration/multiplexed_integration_test.cc b/test/integration/multiplexed_integration_test.cc index c55fc1dabfbeb..0357402a6993a 100644 --- a/test/integration/multiplexed_integration_test.cc +++ 
b/test/integration/multiplexed_integration_test.cc @@ -408,7 +408,7 @@ void verifyExpectedMetadata(Http::MetadataMap metadata_map, std::set void { hcm.set_proxy_100_continue(true); }); @@ -652,7 +652,7 @@ name: request-metadata-filter )EOF"; TEST_P(Http2MetadataIntegrationTest, ConsumeAndInsertRequestMetadata) { - addFilters({request_metadata_filter}); + prependFilters({request_metadata_filter}); config_helper_.addConfigModifier( [&](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& hcm) -> void { hcm.set_proxy_100_continue(true); }); @@ -799,7 +799,7 @@ void Http2MetadataIntegrationTest::verifyHeadersOnlyTest() { } TEST_P(Http2MetadataIntegrationTest, HeadersOnlyRequestWithRequestMetadata) { - addFilters({request_metadata_filter}); + prependFilters({request_metadata_filter}); // Send a headers only request. runHeaderOnlyTest(false, 0); verifyHeadersOnlyTest(); @@ -842,17 +842,17 @@ name: metadata-stop-all-filter )EOF"; TEST_P(Http2MetadataIntegrationTest, RequestMetadataWithStopAllFilterBeforeMetadataFilter) { - addFilters({request_metadata_filter, metadata_stop_all_filter}); + prependFilters({request_metadata_filter, metadata_stop_all_filter}); testRequestMetadataWithStopAllFilter(); } TEST_P(Http2MetadataIntegrationTest, RequestMetadataWithStopAllFilterAfterMetadataFilter) { - addFilters({metadata_stop_all_filter, request_metadata_filter}); + prependFilters({metadata_stop_all_filter, request_metadata_filter}); testRequestMetadataWithStopAllFilter(); } TEST_P(Http2MetadataIntegrationTest, TestAddEncodedMetadata) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encode-headers-return-stop-all-filter )EOF"); @@ -957,7 +957,7 @@ TEST_P(Http2IntegrationTest, BadFrame) { // response are received. 
TEST_P(Http2IntegrationTest, GoAway) { EXCLUDE_DOWNSTREAM_HTTP3; // QuicHttpClientConnectionImpl::goAway NOT_REACHED_GCOVR_EXCL_LINE - config_helper_.addFilter(ConfigHelper::defaultHealthCheckFilter()); + config_helper_.prependFilter(ConfigHelper::defaultHealthCheckFilter()); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -1319,7 +1319,7 @@ TEST_P(Http2IntegrationTest, DelayedCloseDisabled) { } TEST_P(Http2IntegrationTest, PauseAndResume) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: stop-iteration-and-continue-filter typed_config: "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig @@ -1349,7 +1349,7 @@ TEST_P(Http2IntegrationTest, PauseAndResume) { } TEST_P(Http2IntegrationTest, PauseAndResumeHeadersOnly) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: stop-iteration-and-continue-filter typed_config: "@type": type.googleapis.com/test.integration.filters.StopAndContinueConfig @@ -1829,7 +1829,7 @@ name: on-local-reply-filter )EOF"; TEST_P(Http2IntegrationTest, OnLocalReply) { - config_helper_.addFilter(on_local_reply_filter); + config_helper_.prependFilter(on_local_reply_filter); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); diff --git a/test/integration/multiplexed_integration_test.h b/test/integration/multiplexed_integration_test.h index f2bd7ce3267b3..81876304b44c6 100644 --- a/test/integration/multiplexed_integration_test.h +++ b/test/integration/multiplexed_integration_test.h @@ -16,10 +16,11 @@ class Http2IntegrationTest : public HttpProtocolIntegrationTest { void simultaneousRequest(int32_t request1_bytes, int32_t request2_bytes); protected: - // Utility function to add filters. - void addFilters(std::vector filters) { + // Utility function to prepend filters. Note that the filters + // are added in reverse order. 
+ void prependFilters(std::vector filters) { for (const auto& filter : filters) { - config_helper_.addFilter(filter); + config_helper_.prependFilter(filter); } } }; diff --git a/test/integration/multiplexed_upstream_integration_test.cc b/test/integration/multiplexed_upstream_integration_test.cc index 694aebb9d41c7..6dedddbdea612 100644 --- a/test/integration/multiplexed_upstream_integration_test.cc +++ b/test/integration/multiplexed_upstream_integration_test.cc @@ -334,7 +334,7 @@ TEST_P(Http2UpstreamIntegrationTest, ManyLargeSimultaneousRequestWithRandomBacku // receiving flow control window updates. return; } - config_helper_.addFilter( + config_helper_.prependFilter( fmt::format(R"EOF( name: pause-filter{} typed_config: @@ -432,8 +432,8 @@ name: router // As with ProtocolIntegrationTest.HittingEncoderFilterLimit use a filter // which buffers response data but in this case, make sure the sendLocalReply // is gRPC. - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); initialize(); diff --git a/test/integration/protocol_integration_test.cc b/test/integration/protocol_integration_test.cc index 2b1bfb5485efe..e6349bd3b8f88 100644 --- a/test/integration/protocol_integration_test.cc +++ b/test/integration/protocol_integration_test.cc @@ -172,7 +172,7 @@ TEST_P(ProtocolIntegrationTest, UnknownResponsecode) { // Add a health check filter and verify correct computation of health based on upstream status. 
TEST_P(DownstreamProtocolIntegrationTest, ComputedHealthCheck) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: health_check typed_config: "@type": type.googleapis.com/envoy.extensions.filters.http.health_check.v3.HealthCheck @@ -193,7 +193,7 @@ name: health_check // Add a health check filter and verify correct computation of health based on upstream status. TEST_P(DownstreamProtocolIntegrationTest, ModifyBuffer) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: health_check typed_config: "@type": type.googleapis.com/envoy.extensions.filters.http.health_check.v3.HealthCheck @@ -214,11 +214,10 @@ name: health_check // Verifies behavior for https://github.com/envoyproxy/envoy/pull/11248 TEST_P(ProtocolIntegrationTest, AddBodyToRequestAndWaitForIt) { - // filters are prepended, so add them in reverse order - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: wait-for-whole-request-and-response-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter )EOF"); initialize(); @@ -238,11 +237,10 @@ TEST_P(ProtocolIntegrationTest, AddBodyToRequestAndWaitForIt) { } TEST_P(ProtocolIntegrationTest, AddBodyToResponseAndWaitForIt) { - // filters are prepended, so add them in reverse order - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: wait-for-whole-request-and-response-filter )EOF"); initialize(); @@ -260,7 +258,7 @@ TEST_P(ProtocolIntegrationTest, AddBodyToResponseAndWaitForIt) { } TEST_P(ProtocolIntegrationTest, ContinueHeadersOnlyInjectBodyFilter) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: continue-headers-only-inject-body-filter typed_config: "@type": type.googleapis.com/google.protobuf.Empty @@ -288,7 +286,7 @@ TEST_P(ProtocolIntegrationTest, 
ContinueHeadersOnlyInjectBodyFilter) { } TEST_P(ProtocolIntegrationTest, AddEncodedTrailers) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-trailers-filter typed_config: "@type": type.googleapis.com/google.protobuf.Empty @@ -552,8 +550,8 @@ TEST_P(DownstreamProtocolIntegrationTest, DownstreamRequestWithFaultyFilter) { autonomous_upstream_ = true; } useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -604,8 +602,8 @@ TEST_P(DownstreamProtocolIntegrationTest, FaultyFilterWithConnect) { old_listener->set_name("http_forward"); }); useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -625,8 +623,8 @@ TEST_P(DownstreamProtocolIntegrationTest, FaultyFilterWithConnect) { TEST_P(DownstreamProtocolIntegrationTest, MissingHeadersLocalReply) { useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -646,8 +644,8 @@ TEST_P(DownstreamProtocolIntegrationTest, MissingHeadersLocalReply) { TEST_P(DownstreamProtocolIntegrationTest, 
MissingHeadersLocalReplyWithBody) { useAccessLog("%RESPONSE_CODE_DETAILS%"); - config_helper_.addFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: invalid-header-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -1180,8 +1178,8 @@ TEST_P(ProtocolIntegrationTest, RetryHittingRouteLimits) { // Test hitting the decoder buffer filter with too many request bytes to buffer. Ensure the // connection manager sends a 413. TEST_P(DownstreamProtocolIntegrationTest, HittingDecoderFilterLimit) { - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); initialize(); @@ -1223,8 +1221,8 @@ TEST_P(ProtocolIntegrationTest, HittingEncoderFilterLimit) { }); useAccessLog(); - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); initialize(); @@ -1863,7 +1861,7 @@ TEST_P(DownstreamProtocolIntegrationTest, MultipleContentLengthsAllowed) { } TEST_P(DownstreamProtocolIntegrationTest, LocalReplyDuringEncoding) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: local-reply-during-encode )EOF"); initialize(); @@ -1887,7 +1885,7 @@ name: local-reply-during-encode } TEST_P(DownstreamProtocolIntegrationTest, LocalReplyDuringEncodingData) { - config_helper_.addFilter(R"EOF( + 
config_helper_.prependFilter(R"EOF( name: local-reply-during-encode-data )EOF"); initialize(); @@ -2125,13 +2123,13 @@ TEST_P(ProtocolIntegrationTest, LargeRequestMethod) { // Tests StopAllIterationAndBuffer. Verifies decode-headers-return-stop-all-filter calls decodeData // once after iteration is resumed. TEST_P(DownstreamProtocolIntegrationTest, TestDecodeHeadersReturnsStopAll) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: call-decodedata-once-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: decode-headers-return-stop-all-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: passthrough-filter )EOF"); @@ -2176,10 +2174,10 @@ name: passthrough-filter // Tests StopAllIterationAndWatermark. decode-headers-return-stop-all-watermark-filter sets buffer // limit to 100. Verifies data pause when limit is reached, and resume after iteration continues. TEST_P(DownstreamProtocolIntegrationTest, TestDecodeHeadersReturnsStopAllWatermark) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: decode-headers-return-stop-all-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: passthrough-filter )EOF"); @@ -2234,13 +2232,13 @@ name: passthrough-filter // Test two filters that return StopAllIterationAndBuffer back-to-back. TEST_P(DownstreamProtocolIntegrationTest, TestTwoFiltersDecodeHeadersReturnsStopAll) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: decode-headers-return-stop-all-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: decode-headers-return-stop-all-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: passthrough-filter )EOF"); @@ -2282,7 +2280,7 @@ name: passthrough-filter // Tests encodeHeaders() returns StopAllIterationAndBuffer. 
TEST_P(DownstreamProtocolIntegrationTest, TestEncodeHeadersReturnsStopAll) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encode-headers-return-stop-all-filter )EOF"); config_helper_.addConfigModifier( @@ -2316,7 +2314,7 @@ name: encode-headers-return-stop-all-filter // Tests encodeHeaders() returns StopAllIterationAndWatermark. TEST_P(DownstreamProtocolIntegrationTest, TestEncodeHeadersReturnsStopAllWatermark) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encode-headers-return-stop-all-filter )EOF"); config_helper_.addConfigModifier( @@ -2386,8 +2384,8 @@ TEST_P(ProtocolIntegrationTest, MultipleCookiesAndSetCookies) { // Test that delay closed connections are eventually force closed when the timeout triggers. TEST_P(DownstreamProtocolIntegrationTest, TestDelayedConnectionTeardownTimeoutTrigger) { - config_helper_.addFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " - "type.googleapis.com/google.protobuf.Empty } }"); + config_helper_.prependFilter("{ name: encoder-decoder-buffer-filter, typed_config: { \"@type\": " + "type.googleapis.com/google.protobuf.Empty } }"); config_helper_.setBufferLimits(1024, 1024); config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& @@ -2730,7 +2728,7 @@ TEST_P(DownstreamProtocolIntegrationTest, HeaderNormalizationRejection) { // Tests a filter that returns a FilterHeadersStatus::Continue after a local reply without // processing new metadata generated in decodeHeader TEST_P(DownstreamProtocolIntegrationTest, LocalReplyWithMetadata) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: local-reply-with-metadata-filter typed_config: "@type": type.googleapis.com/google.protobuf.Empty @@ -2807,7 +2805,7 @@ name: remove-response-headers-filter )EOF"; TEST_P(ProtocolIntegrationTest, HeadersOnlyRequestWithRemoveResponseHeadersFilter) { - 
config_helper_.addFilter(remove_response_headers_filter); + config_helper_.prependFilter(remove_response_headers_filter); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -2824,7 +2822,7 @@ TEST_P(ProtocolIntegrationTest, HeadersOnlyRequestWithRemoveResponseHeadersFilte } TEST_P(ProtocolIntegrationTest, RemoveResponseHeadersFilter) { - config_helper_.addFilter(remove_response_headers_filter); + config_helper_.prependFilter(remove_response_headers_filter); initialize(); codec_client_ = makeHttpConnection(lookupPort("http")); @@ -2866,14 +2864,14 @@ TEST_P(ProtocolIntegrationTest, ReqRespSizeStats) { // filter chain is aborted and 500 is sent to the client. TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeHeaders) { config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig where_to_add_body: ENCODE_HEADERS body_size: 70000 )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig @@ -2898,10 +2896,10 @@ TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeDataWithResponseH config_helper_.setBufferLimits(64 * 1024, 64 * 1024); // Buffer filter will stop iteration from encodeHeaders preventing response headers from being // sent downstream. 
- config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encoder-decoder-buffer-filter )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig @@ -2930,7 +2928,7 @@ TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeDataWithResponseH TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeData) { config_helper_.setBufferLimits(64 * 1024, 64 * 1024); // Make the add-body-filter stop iteration from encodeData. Headers should be sent to the client. - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig @@ -2938,7 +2936,7 @@ TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeData) { where_to_stop_and_buffer: ENCODE_DATA body_size: 16384 )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig @@ -2965,13 +2963,13 @@ TEST_P(ProtocolIntegrationTest, OverflowEncoderBufferFromEncodeData) { // filter chain is aborted and 413 is sent to the client. TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeHeaders) { config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig crash_in_decode_headers: true )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig @@ -2991,7 +2989,7 @@ TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeHeaders // filter chain is aborted and 413 is sent to the client. 
TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeData) { config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig @@ -2999,7 +2997,7 @@ TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeData) { crash_in_decode_data: true )EOF"); // Buffer filter causes filter manager to buffer data - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encoder-decoder-buffer-filter )EOF"); initialize(); @@ -3027,21 +3025,21 @@ TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeData) { // manager's internal state is slightly different. TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeDataContinueIteration) { config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig crash_in_decode_headers: false crash_in_decode_data: true )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig where_to_add_body: DECODE_DATA body_size: 70000 )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: encoder-decoder-buffer-filter )EOF"); initialize(); @@ -3073,7 +3071,7 @@ TEST_P(DownstreamProtocolIntegrationTest, return; } config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig @@ -3112,7 +3110,7 @@ TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeTrailer return; } 
config_helper_.setBufferLimits(64 * 1024, 64 * 1024); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: crash-filter typed_config: "@type": type.googleapis.com/test.integration.filters.CrashFilterConfig @@ -3120,7 +3118,7 @@ TEST_P(DownstreamProtocolIntegrationTest, OverflowDecoderBufferFromDecodeTrailer crash_in_decode_data: true crash_in_decode_trailers: true )EOF"); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: add-body-filter typed_config: "@type": type.googleapis.com/test.integration.filters.AddBodyFilterConfig diff --git a/test/integration/redirect_integration_test.cc b/test/integration/redirect_integration_test.cc index ff48c8922b704..6c9f983438f28 100644 --- a/test/integration/redirect_integration_test.cc +++ b/test/integration/redirect_integration_test.cc @@ -535,7 +535,7 @@ TEST_P(RedirectIntegrationTest, InternalRedirectToDestinationWithResponseBody) { config_helper_.addConfigModifier( [](envoy::extensions::filters::network::http_connection_manager::v3::HttpConnectionManager& hcm) { hcm.set_via("via_value"); }); - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: pause-filter typed_config: "@type": type.googleapis.com/google.protobuf.Empty diff --git a/test/integration/scoped_rds_integration_test.cc b/test/integration/scoped_rds_integration_test.cc index c1a96010e6c57..b76a18cda3b37 100644 --- a/test/integration/scoped_rds_integration_test.cc +++ b/test/integration/scoped_rds_integration_test.cc @@ -648,7 +648,7 @@ route_configuration_name: foo_route1 // Test that a scoped route config update is performed on demand and http request will succeed. 
TEST_P(ScopedRdsIntegrationTest, OnDemandUpdateSuccess) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); const std::string scope_route1 = R"EOF( @@ -700,7 +700,7 @@ on_demand: true // With on demand update filter configured, scope not match should still return 404 TEST_P(ScopedRdsIntegrationTest, OnDemandUpdateScopeNotMatch) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); @@ -750,7 +750,7 @@ route_configuration_name: {} // return 404 TEST_P(ScopedRdsIntegrationTest, OnDemandUpdatePrimaryVirtualHostNotMatch) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); @@ -800,7 +800,7 @@ route_configuration_name: {} // return 404 TEST_P(ScopedRdsIntegrationTest, OnDemandUpdateVirtualHostNotMatch) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); @@ -854,7 +854,7 @@ on_demand: true // Eager and lazy scopes share the same route configuration TEST_P(ScopedRdsIntegrationTest, DifferentPriorityScopeShareRoute) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); @@ -910,7 +910,7 @@ on_demand: true } TEST_P(ScopedRdsIntegrationTest, OnDemandUpdateAfterActiveStreamDestroyed) { - config_helper_.addFilter(R"EOF( + config_helper_.prependFilter(R"EOF( name: envoy.filters.http.on_demand )EOF"); const std::string scope_route1 = R"EOF( diff --git a/test/integration/sds_generic_secret_integration_test.cc b/test/integration/sds_generic_secret_integration_test.cc index 5d9c64c921b2b..629213397112a 100644 --- a/test/integration/sds_generic_secret_integration_test.cc +++ b/test/integration/sds_generic_secret_integration_test.cc @@ -101,7 +101,7 @@ class SdsGenericSecretIntegrationTest : public Grpc::GrpcClientIntegrationParamT ConfigHelper::setHttp2(*sds_cluster); 
}); - config_helper_.addFilter("{ name: sds-generic-secret-test }"); + config_helper_.prependFilter("{ name: sds-generic-secret-test }"); create_xds_upstream_ = true; HttpIntegrationTest::initialize(); diff --git a/test/integration/version_integration_test.cc b/test/integration/version_integration_test.cc index 2ee09525e7519..a4362b80e5595 100644 --- a/test/integration/version_integration_test.cc +++ b/test/integration/version_integration_test.cc @@ -26,25 +26,25 @@ const char ExampleIpTaggingConfig[] = R"EOF( // envoy.filters.http.ip_tagging from v3 TypedStruct config. TEST_P(VersionIntegrationTest, IpTaggingV3StaticTypedStructConfig) { - config_helper_.addFilter(absl::StrCat(R"EOF( + config_helper_.prependFilter(absl::StrCat(R"EOF( name: ip_tagging typed_config: "@type": type.googleapis.com/udpa.type.v1.TypedStruct type_url: type.googleapis.com/envoy.extensions.filters.http.ip_tagging.v3.IPTagging value: )EOF", - ExampleIpTaggingConfig)); + ExampleIpTaggingConfig)); initialize(); } // envoy.filters.http.ip_tagging from v3 typed Any config. TEST_P(VersionIntegrationTest, IpTaggingV3StaticTypedConfig) { - config_helper_.addFilter(absl::StrCat(R"EOF( + config_helper_.prependFilter(absl::StrCat(R"EOF( name: ip_tagging typed_config: "@type": type.googleapis.com/envoy.extensions.filters.http.ip_tagging.v3.IPTagging )EOF", - ExampleIpTaggingConfig)); + ExampleIpTaggingConfig)); initialize(); } diff --git a/test/integration/websocket_integration_test.cc b/test/integration/websocket_integration_test.cc index c7159200ab9e0..1ac084679cd4f 100644 --- a/test/integration/websocket_integration_test.cc +++ b/test/integration/websocket_integration_test.cc @@ -350,7 +350,7 @@ TEST_P(WebsocketIntegrationTest, WebsocketCustomFilterChain) { // Add a small buffer filter to the standard HTTP filter chain. Websocket // upgrades will use the HTTP filter chain so will also have small buffers. 
- config_helper_.addFilter(ConfigHelper::smallBufferFilter()); + config_helper_.prependFilter(ConfigHelper::smallBufferFilter()); // Add a second upgrade type which goes directly to the router filter. config_helper_.addConfigModifier( diff --git a/tools/base/requirements.in b/tools/base/requirements.in new file mode 100644 index 0000000000000..8ec1dbb9be567 --- /dev/null +++ b/tools/base/requirements.in @@ -0,0 +1,30 @@ +aio.functional +colorama +coloredlogs +coverage +envoy.base.utils +envoy.distribution.release +envoy.distribution.verify +envoy.gpg.sign +flake8 +frozendict +gitpython +jinja2 +pep8-naming +pygithub +pyreadline +pytest +pytest-asyncio +pytest-cov +pytest-patches +pyyaml +setuptools +sphinx +sphinxcontrib-httpdomain +sphinxcontrib-serializinghtml +sphinx-copybutton +sphinxext-rediraffe +sphinx-rtd-theme +sphinx-tabs +verboselogs +yapf diff --git a/tools/base/requirements.txt b/tools/base/requirements.txt index 658174e0bc9ff..8d18996322e79 100644 --- a/tools/base/requirements.txt +++ b/tools/base/requirements.txt @@ -2,32 +2,566 @@ # This file is autogenerated by pip-compile # To update, run: # -# pip-compile --allow-unsafe --generate-hashes tools/base/requirements.txt +# pip-compile --allow-unsafe --generate-hashes tools/base/requirements.in # abstracts==0.0.12 \ --hash=sha256:acc01ff56c8a05fb88150dff62e295f9071fc33388c42f1dfc2787a8d1c755ff - # via aio.functional + # via + # aio.functional + # envoy.abstract.command + # envoy.base.utils + # envoy.github.abstract + # envoy.github.release aio.functional==0.0.9 \ --hash=sha256:824a997a394ad891bc9f403426babc13c9d0d1f4d1708c38e77d6aecae1cab1d - # via -r tools/base/requirements.txt + # via + # -r tools/base/requirements.in + # aio.tasks + # envoy.github.abstract + # envoy.github.release +aio.stream==0.0.2 \ + --hash=sha256:6f5baaff48f6319db134cd56c06ccf89db1f7c5f67a26382e081efc96f2f675d + # via envoy.github.release +aio.tasks==0.0.4 \ + 
--hash=sha256:9abd4b0881edb292c4f91a2f63b1dea7a9829a4bd4e8440225a1a412a90461fc + # via + # envoy.github.abstract + # envoy.github.release +aiodocker==0.21.0 \ + --hash=sha256:1f2e6db6377195962bb676d4822f6e3a0c525e1b5d60b8ebbab68230bff3d227 \ + --hash=sha256:6fe00135bb7dc40a407669d3157ecdfd856f3737d939df54f40a479d40cf7bdc + # via + # envoy.distribution.distrotest + # envoy.docker.utils +aiofiles==0.7.0 \ + --hash=sha256:a1c4fc9b2ff81568c83e21392a82f344ea9d23da906e4f6a52662764545e19d4 \ + --hash=sha256:c67a6823b5f23fcab0a2595a289cec7d8c863ffcb4322fb8cd6b90400aedfdbc + # via aio.stream +aiohttp==3.7.4.post0 \ + --hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \ + --hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \ + --hash=sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5 \ + --hash=sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8 \ + --hash=sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd \ + --hash=sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb \ + --hash=sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c \ + --hash=sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87 \ + --hash=sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0 \ + --hash=sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290 \ + --hash=sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5 \ + --hash=sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287 \ + --hash=sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde \ + --hash=sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf \ + --hash=sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8 \ + --hash=sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16 \ + 
--hash=sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf \ + --hash=sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809 \ + --hash=sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213 \ + --hash=sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f \ + --hash=sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 \ + --hash=sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b \ + --hash=sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9 \ + --hash=sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5 \ + --hash=sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb \ + --hash=sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df \ + --hash=sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4 \ + --hash=sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439 \ + --hash=sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f \ + --hash=sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22 \ + --hash=sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f \ + --hash=sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5 \ + --hash=sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970 \ + --hash=sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009 \ + --hash=sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc \ + --hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \ + --hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 + # via + # aio.stream + # aiodocker + # envoy.github.abstract + # envoy.github.release +alabaster==0.7.12 \ + --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ + 
--hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 + # via sphinx +async-timeout==3.0.1 \ + --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ + --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 + # via aiohttp +attrs==21.2.0 \ + --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ + --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb + # via + # aiohttp + # pytest +babel==2.9.1 \ + --hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \ + --hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0 + # via sphinx +certifi==2021.5.30 \ + --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ + --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 + # via requests +cffi==1.14.6 \ + --hash=sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d \ + --hash=sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771 \ + --hash=sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872 \ + --hash=sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c \ + --hash=sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc \ + --hash=sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762 \ + --hash=sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202 \ + --hash=sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5 \ + --hash=sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548 \ + --hash=sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a \ + --hash=sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f \ + --hash=sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20 \ + 
--hash=sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218 \ + --hash=sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c \ + --hash=sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e \ + --hash=sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56 \ + --hash=sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224 \ + --hash=sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a \ + --hash=sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2 \ + --hash=sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a \ + --hash=sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819 \ + --hash=sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346 \ + --hash=sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b \ + --hash=sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e \ + --hash=sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534 \ + --hash=sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb \ + --hash=sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0 \ + --hash=sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156 \ + --hash=sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd \ + --hash=sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87 \ + --hash=sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc \ + --hash=sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195 \ + --hash=sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33 \ + --hash=sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f \ + --hash=sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d \ + --hash=sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd \ + 
--hash=sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728 \ + --hash=sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7 \ + --hash=sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca \ + --hash=sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99 \ + --hash=sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf \ + --hash=sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e \ + --hash=sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c \ + --hash=sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5 \ + --hash=sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69 + # via + # cryptography + # pynacl +chardet==4.0.0 \ + --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ + --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 + # via aiohttp +charset-normalizer==2.0.4 \ + --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ + --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 + # via requests colorama==0.4.4 \ --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 - # via -r tools/base/requirements.txt + # via -r tools/base/requirements.in coloredlogs==15.0.1 \ --hash=sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934 \ --hash=sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0 - # via -r tools/base/requirements.txt + # via + # -r tools/base/requirements.in + # envoy.base.runner +coverage==5.5 \ + --hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ + --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ + --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ + 
--hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ + --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ + --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ + --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ + --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ + --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ + --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ + --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ + --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ + --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ + --hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ + --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ + --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ + --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ + --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ + --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ + --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ + --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ + --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ + --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ + --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ + --hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ + --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ + --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ + 
--hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ + --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ + --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ + --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ + --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ + --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ + --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ + --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ + --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ + --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ + --hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ + --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ + --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ + --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ + --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ + --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ + --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ + --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ + --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ + --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ + --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ + --hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ + --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ + --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ + 
--hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 + # via + # -r tools/base/requirements.in + # pytest-cov +cryptography==3.4.8 \ + --hash=sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e \ + --hash=sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b \ + --hash=sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7 \ + --hash=sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085 \ + --hash=sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc \ + --hash=sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a \ + --hash=sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498 \ + --hash=sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9 \ + --hash=sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c \ + --hash=sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7 \ + --hash=sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb \ + --hash=sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14 \ + --hash=sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af \ + --hash=sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e \ + --hash=sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5 \ + --hash=sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06 \ + --hash=sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7 + # via pyjwt +deprecated==1.2.13 \ + --hash=sha256:43ac5335da90c31c24ba028af536a91d41d53f9e6901ddb021bcc572ce44e38d \ + --hash=sha256:64756e3e14c8c5eea9795d93c524551432a0be75629f8f29e67ab8caf076c76d + # via pygithub +docutils==0.16 \ + --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ + --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc + # via + # sphinx + # 
sphinx-rtd-theme + # sphinx-tabs +envoy.abstract.command==0.0.3 \ + --hash=sha256:4b7b15c91bea1f2eb7c2e8e35f95cd9437e1c8f151adc093bf7858fc85d48221 + # via + # envoy.base.runner + # envoy.distribution.release +envoy.base.checker==0.0.2 \ + --hash=sha256:2ac81efa20fd01fff644ff7dc7fadeac1c3e4dbb6210881ac7a7919ec0e048d8 + # via + # envoy.distribution.distrotest + # envoy.distribution.verify +envoy.base.runner==0.0.4 \ + --hash=sha256:4eeb2b661f1f0c402df4425852be554a8a83ef5d338bfae69ddcb9b90755379e + # via + # envoy.base.checker + # envoy.distribution.release + # envoy.github.abstract + # envoy.gpg.sign +envoy.base.utils==0.0.8 \ + --hash=sha256:b82e18ab0535207b7136d6980239c9350f7113fa5da7dda781bcb6ad1e05b3ab + # via + # -r tools/base/requirements.in + # envoy.distribution.distrotest + # envoy.github.release + # envoy.gpg.sign +envoy.distribution.distrotest==0.0.3 \ + --hash=sha256:c094adbd959eb1336f93afc00aedb7ee4e68e8252e2365be816a6f9ede8a3de7 + # via envoy.distribution.verify +envoy.distribution.release==0.0.4 \ + --hash=sha256:41037e0488f0593ce5173739fe0cd1b45a4775f5a47738b85d9d04024ca241a2 + # via -r tools/base/requirements.in +envoy.distribution.verify==0.0.2 \ + --hash=sha256:ae59134085de50203edf51c243dbf3301cbe5550db29f0ec6f9ea1c3b82fee1c + # via -r tools/base/requirements.in +envoy.docker.utils==0.0.2 \ + --hash=sha256:a12cb57f0b6e204d646cbf94f927b3a8f5a27ed15f60d0576176584ec16a4b76 + # via envoy.distribution.distrotest +envoy.github.abstract==0.0.16 \ + --hash=sha256:badf04104492fb6b37ba2163f2b225132ed04aba680beb218e7c7d918564f8ee + # via + # envoy.distribution.release + # envoy.github.release +envoy.github.release==0.0.8 \ + --hash=sha256:fbc4354030137eb565b8c4d679965e4ef60b01de0c09310441836e592ca0cd19 + # via envoy.distribution.release +envoy.gpg.identity==0.0.2 \ + --hash=sha256:7d32ff9133e00b9974b4dabd2512b4872b091b8c5069d0112240dcc1a56bc406 + # via envoy.gpg.sign +envoy.gpg.sign==0.0.3 \ + 
--hash=sha256:31667931f5d7ff05fd809b89748f277511486311c777652af4cb8889bd641049 + # via -r tools/base/requirements.in +flake8-polyfill==1.0.2 \ + --hash=sha256:12be6a34ee3ab795b19ca73505e7b55826d5f6ad7230d31b18e106400169b9e9 \ + --hash=sha256:e44b087597f6da52ec6393a709e7108b2905317d0c0b744cdca6208e670d8eda + # via pep8-naming +flake8==3.9.2 \ + --hash=sha256:07528381786f2a6237b061f6e96610a4167b226cb926e2aa2b6b1d78057c576b \ + --hash=sha256:bf8fd333346d844f616e8d47905ef3a3384edae6b4e9beb0c5101e25e3110907 + # via + # -r tools/base/requirements.in + # flake8-polyfill + # pep8-naming frozendict==2.0.6 \ --hash=sha256:3f00de72805cf4c9e81b334f3f04809278b967d2fed84552313a0fcce511beb1 \ --hash=sha256:5d3f75832c35d4df041f0e19c268964cbef29c1eb34cd3517cf883f1c2d089b9 - # via -r tools/base/requirements.txt + # via + # -r tools/base/requirements.in + # envoy.base.runner +gidgethub==5.0.1 \ + --hash=sha256:3efbd6998600254ec7a2869318bd3ffde38edc3a0d37be0c14bc46b45947b682 \ + --hash=sha256:67245e93eb0918b37df038148af675df43b62e832c529d7f859f6b90d9f3e70d + # via + # envoy.github.abstract + # envoy.github.release +gitdb==4.0.7 \ + --hash=sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0 \ + --hash=sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005 + # via gitpython +gitpython==3.1.18 \ + --hash=sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b \ + --hash=sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8 + # via -r tools/base/requirements.in humanfriendly==9.2 \ --hash=sha256:332da98c24cc150efcc91b5508b19115209272bfdf4b0764a56795932f854271 \ --hash=sha256:f7dba53ac7935fd0b4a2fc9a29e316ddd9ea135fb3052d3d0279d10c18ff9c48 + # via coloredlogs +idna==3.2 \ + --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ + --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 + # via + # requests + # yarl +imagesize==1.2.0 \ + 
--hash=sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \ + --hash=sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1 + # via sphinx +iniconfig==1.1.1 \ + --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ + --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 + # via pytest +jinja2==3.0.1 \ + --hash=sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4 \ + --hash=sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4 + # via + # -r tools/base/requirements.in + # sphinx +markupsafe==2.0.1 \ + --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \ + --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \ + --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \ + --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \ + --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \ + --hash=sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724 \ + --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \ + --hash=sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646 \ + --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \ + --hash=sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6 \ + --hash=sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6 \ + --hash=sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad \ + --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \ + --hash=sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38 \ + --hash=sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac \ + --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \ + 
--hash=sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6 \ + --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \ + --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \ + --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \ + --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \ + --hash=sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a \ + --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \ + --hash=sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9 \ + --hash=sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864 \ + --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \ + --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \ + --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \ + --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \ + --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \ + --hash=sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b \ + --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \ + --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \ + --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \ + --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \ + --hash=sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28 \ + --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \ + --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \ + --hash=sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d \ + --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \ + 
--hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \ + --hash=sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145 \ + --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \ + --hash=sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c \ + --hash=sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1 \ + --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \ + --hash=sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53 \ + --hash=sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134 \ + --hash=sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85 \ + --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \ + --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \ + --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \ + --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \ + --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872 + # via jinja2 +mccabe==0.6.1 \ + --hash=sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42 \ + --hash=sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f + # via flake8 +multidict==5.1.0 \ + --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ + --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ + --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ + --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ + --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ + --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ + --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ + 
--hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ + --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ + --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ + --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ + --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ + --hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ + --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ + --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ + --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ + --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ + --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ + --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ + --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ + --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ + --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ + --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ + --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ + --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ + --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ + --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ + --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ + --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ + --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ + --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ + 
--hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ + --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ + --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ + --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ + --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ + --hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 + # via + # aiohttp + # yarl +packaging==21.0 \ + --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ + --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 # via - # -r tools/base/requirements.txt - # coloredlogs + # envoy.github.release + # pytest + # sphinx +pep8-naming==0.12.1 \ + --hash=sha256:4a8daeaeb33cfcde779309fc0c9c0a68a3bbe2ad8a8308b763c5068f86eb9f37 \ + --hash=sha256:bb2455947757d162aa4cad55dba4ce029005cd1692f2899a21d51d8630ca7841 + # via -r tools/base/requirements.in +pluggy==1.0.0 \ + --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 \ + --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 + # via pytest +py==1.10.0 \ + --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ + --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a + # via pytest +pycodestyle==2.7.0 \ + --hash=sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068 \ + --hash=sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef + # via flake8 +pycparser==2.20 \ + --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ + --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 + # via cffi +pyflakes==2.3.1 \ + --hash=sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3 \ + 
--hash=sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db + # via flake8 +pygithub==1.55 \ + --hash=sha256:1bbfff9372047ff3f21d5cd8e07720f3dbfdaf6462fcaed9d815f528f1ba7283 \ + --hash=sha256:2caf0054ea079b71e539741ae56c5a95e073b81fa472ce222e81667381b9601b + # via -r tools/base/requirements.in +pygments==2.10.0 \ + --hash=sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380 \ + --hash=sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6 + # via + # sphinx + # sphinx-tabs +pyjwt[crypto]==2.1.0 \ + --hash=sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1 \ + --hash=sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130 + # via + # gidgethub + # pygithub +pynacl==1.4.0 \ + --hash=sha256:06cbb4d9b2c4bd3c8dc0d267416aaed79906e7b33f114ddbf0911969794b1cc4 \ + --hash=sha256:11335f09060af52c97137d4ac54285bcb7df0cef29014a1a4efe64ac065434c4 \ + --hash=sha256:2fe0fc5a2480361dcaf4e6e7cea00e078fcda07ba45f811b167e3f99e8cff574 \ + --hash=sha256:30f9b96db44e09b3304f9ea95079b1b7316b2b4f3744fe3aaecccd95d547063d \ + --hash=sha256:4e10569f8cbed81cb7526ae137049759d2a8d57726d52c1a000a3ce366779634 \ + --hash=sha256:511d269ee845037b95c9781aa702f90ccc36036f95d0f31373a6a79bd8242e25 \ + --hash=sha256:537a7ccbea22905a0ab36ea58577b39d1fa9b1884869d173b5cf111f006f689f \ + --hash=sha256:54e9a2c849c742006516ad56a88f5c74bf2ce92c9f67435187c3c5953b346505 \ + --hash=sha256:757250ddb3bff1eecd7e41e65f7f833a8405fede0194319f87899690624f2122 \ + --hash=sha256:7757ae33dae81c300487591c68790dfb5145c7d03324000433d9a2c141f82af7 \ + --hash=sha256:7c6092102219f59ff29788860ccb021e80fffd953920c4a8653889c029b2d420 \ + --hash=sha256:8122ba5f2a2169ca5da936b2e5a511740ffb73979381b4229d9188f6dcb22f1f \ + --hash=sha256:9c4a7ea4fb81536c1b1f5cc44d54a296f96ae78c1ebd2311bd0b60be45a48d96 \ + --hash=sha256:c914f78da4953b33d4685e3cdc7ce63401247a21425c16a39760e282075ac4a6 \ + 
--hash=sha256:cd401ccbc2a249a47a3a1724c2918fcd04be1f7b54eb2a5a71ff915db0ac51c6 \ + --hash=sha256:d452a6746f0a7e11121e64625109bc4468fc3100452817001dbe018bb8b08514 \ + --hash=sha256:ea6841bc3a76fa4942ce00f3bda7d436fda21e2d91602b9e21b7ca9ecab8f3ff \ + --hash=sha256:f8851ab9041756003119368c1e6cd0b9c631f46d686b3904b18c0139f4419f80 + # via pygithub +pyparsing==2.4.7 \ + --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ + --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b + # via packaging +pyreadline==2.1 \ + --hash=sha256:4530592fc2e85b25b1a9f79664433da09237c1a270e4d78ea5aa3a2c7229e2d1 + # via -r tools/base/requirements.in +pytest-asyncio==0.15.1 \ + --hash=sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f \ + --hash=sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea + # via -r tools/base/requirements.in +pytest-cov==2.12.1 \ + --hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ + --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 + # via -r tools/base/requirements.in +pytest-patches==0.0.3 \ + --hash=sha256:6f8cdc8641c708c4812f58ae48d410f373a6fd16cd6cc4dc4d3fb8951df9c92a + # via -r tools/base/requirements.in +pytest==6.2.5 \ + --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 \ + --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 + # via + # -r tools/base/requirements.in + # pytest-asyncio + # pytest-cov + # pytest-patches +python-gnupg==0.4.7 \ + --hash=sha256:2061f56b1942c29b92727bf9aecbd3cea3893acc9cccbdc7eb4604285efe4ac7 \ + --hash=sha256:3ff5b1bf5e397de6e1fe41a7c0f403dad4e242ac92b345f440eaecfb72a7ebae + # via envoy.gpg.identity +pytz==2021.1 \ + --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \ + --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798 + # via babel pyyaml==5.4.1 \ 
--hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ @@ -58,14 +592,169 @@ pyyaml==5.4.1 \ --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 - # via -r tools/base/requirements.txt + # via + # -r tools/base/requirements.in + # envoy.base.utils +requests==2.26.0 \ + --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ + --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 + # via + # pygithub + # sphinx +six==1.16.0 \ + --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ + --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 + # via + # pynacl + # sphinxcontrib-httpdomain +smmap==4.0.0 \ + --hash=sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182 \ + --hash=sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2 + # via gitdb +snowballstemmer==2.1.0 \ + --hash=sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2 \ + --hash=sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914 + # via sphinx +sphinx-copybutton==0.4.0 \ + --hash=sha256:4340d33c169dac6dd82dce2c83333412aa786a42dd01a81a8decac3b130dc8b0 \ + --hash=sha256:8daed13a87afd5013c3a9af3575cc4d5bec052075ccd3db243f895c07a689386 + # via -r tools/base/requirements.in +sphinx-rtd-theme==0.5.2 \ + --hash=sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a \ + --hash=sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f + # via -r tools/base/requirements.in +sphinx-tabs==3.2.0 \ + --hash=sha256:1e1b1846c80137bd81a78e4a69b02664b98b1e1da361beb30600b939dfc75065 \ + 
--hash=sha256:33137914ed9b276e6a686d7a337310ee77b1dae316fdcbce60476913a152e0a4 + # via -r tools/base/requirements.in +sphinx==4.1.2 \ + --hash=sha256:3092d929cd807926d846018f2ace47ba2f3b671b309c7a89cd3306e80c826b13 \ + --hash=sha256:46d52c6cee13fec44744b8c01ed692c18a640f6910a725cbb938bc36e8d64544 + # via + # -r tools/base/requirements.in + # sphinx-copybutton + # sphinx-rtd-theme + # sphinx-tabs + # sphinxcontrib-httpdomain + # sphinxext-rediraffe +sphinxcontrib-applehelp==1.0.2 \ + --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \ + --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 + # via sphinx +sphinxcontrib-devhelp==1.0.2 \ + --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \ + --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 + # via sphinx +sphinxcontrib-htmlhelp==2.0.0 \ + --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 \ + --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2 + # via sphinx +sphinxcontrib-httpdomain==1.7.0 \ + --hash=sha256:1fb5375007d70bf180cdd1c79e741082be7aa2d37ba99efe561e1c2e3f38191e \ + --hash=sha256:ac40b4fba58c76b073b03931c7b8ead611066a6aebccafb34dc19694f4eb6335 + # via -r tools/base/requirements.in +sphinxcontrib-jsmath==1.0.1 \ + --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ + --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 + # via sphinx +sphinxcontrib-qthelp==1.0.3 \ + --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \ + --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 + # via sphinx +sphinxcontrib-serializinghtml==1.1.5 \ + --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd \ + --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952 + # via + # -r tools/base/requirements.in + # 
sphinx +sphinxext-rediraffe==0.2.7 \ + --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ + --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c + # via -r tools/base/requirements.in +toml==0.10.2 \ + --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ + --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f + # via + # pytest + # pytest-cov +trycast==0.3.0 \ + --hash=sha256:1b7b4c0d4b0d674770a53f34a762e52a6cd6879eb251ab21625602699920080d \ + --hash=sha256:687185b812e8d1c45f2ba841e8de7bdcdee0695dcf3464f206800505d4c65f26 + # via envoy.base.utils +typing-extensions==3.10.0.2 \ + --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ + --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 \ + --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 + # via + # aiodocker + # aiohttp +uritemplate==3.0.1 \ + --hash=sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f \ + --hash=sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae + # via gidgethub +urllib3==1.26.6 \ + --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ + --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f + # via requests verboselogs==1.7 \ --hash=sha256:d63f23bf568295b95d3530c6864a0b580cec70e7ff974177dead1e4ffbc6ff49 \ --hash=sha256:e33ddedcdfdafcb3a174701150430b11b46ceb64c2a9a26198c76a156568e427 - # via -r tools/base/requirements.txt + # via + # -r tools/base/requirements.in + # envoy.base.runner + # envoy.github.abstract + # envoy.github.release +wrapt==1.12.1 \ + --hash=sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7 + # via deprecated +yapf==0.31.0 \ + --hash=sha256:408fb9a2b254c302f49db83c59f9aa0b4b0fd0ec25be3a5c51181327922ff63d \ + 
--hash=sha256:e3a234ba8455fe201eaa649cdac872d590089a18b661e39bbac7020978dd9c2e + # via -r tools/base/requirements.in +yarl==1.6.3 \ + --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ + --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ + --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ + --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ + --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ + --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ + --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ + --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ + --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ + --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ + --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ + --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ + --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ + --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ + --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ + --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ + --hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ + --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ + --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ + --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ + --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ + --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ + 
--hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ + --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ + --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ + --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ + --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ + --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ + --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ + --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ + --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ + --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ + --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ + --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ + --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ + --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ + --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 + # via aiohttp # The following packages are considered to be unsafe in a requirements file: -setuptools==57.4.0 \ - --hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \ - --hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6 - # via -r tools/base/requirements.txt +setuptools==58.0.3 \ + --hash=sha256:1ceadf3ea9a821ef305505db995f2e21550ea62500900164278c4b23109204f3 \ + --hash=sha256:5e4c36f55012a46c1b3e4b67a8236d1d73856a90fc7b3207d29bedb7d2bac417 + # via + # -r tools/base/requirements.in + # sphinx diff --git a/tools/code_format/BUILD b/tools/code_format/BUILD index bb9683c151330..ba9de5fce8557 100644 --- a/tools/code_format/BUILD +++ b/tools/code_format/BUILD @@ -1,4 +1,4 @@ 
-load("@pylint_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") load("//bazel:envoy_build_system.bzl", "envoy_package") load("//tools/base:envoy_python.bzl", "envoy_py_binary") diff --git a/tools/code_format/check_format.py b/tools/code_format/check_format.py index d46f40af7dc2c..fa88387510e19 100755 --- a/tools/code_format/check_format.py +++ b/tools/code_format/check_format.py @@ -257,6 +257,8 @@ "extensions/filters/network/redis_proxy", "extensions/filters/network/kafka", "extensions/filters/network/kafka/broker", + "extensions/filters/network/kafka/mesh", + "extensions/filters/network/kafka/mesh/command_handlers", "extensions/filters/network/kafka/protocol", "extensions/filters/network/kafka/serialization", "extensions/filters/network/mongo_proxy", diff --git a/tools/config_validation/BUILD b/tools/config_validation/BUILD index 5ca3d0ef9a4af..ce2c1d3afbc55 100644 --- a/tools/config_validation/BUILD +++ b/tools/config_validation/BUILD @@ -1,5 +1,5 @@ load("@rules_python//python:defs.bzl", "py_binary") -load("@config_validation_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") licenses(["notice"]) # Apache 2 diff --git a/tools/config_validation/requirements.txt b/tools/config_validation/requirements.txt deleted file mode 100644 index 34601fe949b16..0000000000000 --- a/tools/config_validation/requirements.txt +++ /dev/null @@ -1,30 +0,0 @@ -PyYAML==5.4.1 \ - --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ - --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ - --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ - --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ - --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ - --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ - 
--hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ - --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ - --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ - --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ - --hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ - --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ - --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ - --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ - --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ - --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ - --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ - --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ - --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ - --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ - --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ - --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ - --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ - --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ - --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ - --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ - --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 diff --git a/tools/dependency/cve_scan.py b/tools/dependency/cve_scan.py index 
ca2d2a144253c..5cdee9a90af33 100755 --- a/tools/dependency/cve_scan.py +++ b/tools/dependency/cve_scan.py @@ -47,6 +47,9 @@ 'CVE-2020-8169', 'CVE-2020-8177', 'CVE-2020-8284', + # Low severity Curl issue with incorrect re-use of connections due to case + # in/sensitivity + 'CVE-2021-22924', # Node.js issue unrelated to http-parser (Node TLS). 'CVE-2020-8265', # Node.js request smuggling. @@ -64,8 +67,14 @@ # Node.js issues unrelated to http-parser. # See https://nvd.nist.gov/vuln/detail/CVE-2021-22918 # See https://nvd.nist.gov/vuln/detail/CVE-2021-22921 + # See https://nvd.nist.gov/vuln/detail/CVE-2021-22931 + # See https://nvd.nist.gov/vuln/detail/CVE-2021-22939 + # See https://nvd.nist.gov/vuln/detail/CVE-2021-22940 'CVE-2021-22918', 'CVE-2021-22921', + 'CVE-2021-22931', + 'CVE-2021-22939', + 'CVE-2021-22940', ]) # Subset of CVE fields that are useful below. diff --git a/tools/distribution/BUILD b/tools/distribution/BUILD index 11142e4324f0e..6b60dda875708 100644 --- a/tools/distribution/BUILD +++ b/tools/distribution/BUILD @@ -1,6 +1,6 @@ load("//bazel:envoy_build_system.bzl", "envoy_package") load("//tools/base:envoy_python.bzl", "envoy_py_script") -load("@distribution_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") licenses(["notice"]) # Apache 2 diff --git a/tools/distribution/requirements.txt b/tools/distribution/requirements.txt deleted file mode 100644 index dcd1a7600ba2f..0000000000000 --- a/tools/distribution/requirements.txt +++ /dev/null @@ -1,382 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --generate-hashes tools/distribution/requirements.txt -# -abstracts==0.0.12 \ - --hash=sha256:acc01ff56c8a05fb88150dff62e295f9071fc33388c42f1dfc2787a8d1c755ff - # via - # aio.functional - # envoy.abstract.command - # envoy.github.abstract - # envoy.github.release -aio.functional==0.0.9 \ - --hash=sha256:824a997a394ad891bc9f403426babc13c9d0d1f4d1708c38e77d6aecae1cab1d - # 
via - # aio.tasks - # envoy.github.abstract - # envoy.github.release -aio.stream==0.0.2 \ - --hash=sha256:6f5baaff48f6319db134cd56c06ccf89db1f7c5f67a26382e081efc96f2f675d - # via envoy.github.release -aio.tasks==0.0.4 \ - --hash=sha256:9abd4b0881edb292c4f91a2f63b1dea7a9829a4bd4e8440225a1a412a90461fc - # via - # envoy.github.abstract - # envoy.github.release -aiodocker==0.21.0 \ - --hash=sha256:1f2e6db6377195962bb676d4822f6e3a0c525e1b5d60b8ebbab68230bff3d227 \ - --hash=sha256:6fe00135bb7dc40a407669d3157ecdfd856f3737d939df54f40a479d40cf7bdc - # via - # envoy.distribution.distrotest - # envoy.docker.utils -aiofiles==0.7.0 \ - --hash=sha256:a1c4fc9b2ff81568c83e21392a82f344ea9d23da906e4f6a52662764545e19d4 \ - --hash=sha256:c67a6823b5f23fcab0a2595a289cec7d8c863ffcb4322fb8cd6b90400aedfdbc - # via aio.stream -aiohttp==3.7.4.post0 \ - --hash=sha256:02f46fc0e3c5ac58b80d4d56eb0a7c7d97fcef69ace9326289fb9f1955e65cfe \ - --hash=sha256:0563c1b3826945eecd62186f3f5c7d31abb7391fedc893b7e2b26303b5a9f3fe \ - --hash=sha256:114b281e4d68302a324dd33abb04778e8557d88947875cbf4e842c2c01a030c5 \ - --hash=sha256:14762875b22d0055f05d12abc7f7d61d5fd4fe4642ce1a249abdf8c700bf1fd8 \ - --hash=sha256:15492a6368d985b76a2a5fdd2166cddfea5d24e69eefed4630cbaae5c81d89bd \ - --hash=sha256:17c073de315745a1510393a96e680d20af8e67e324f70b42accbd4cb3315c9fb \ - --hash=sha256:209b4a8ee987eccc91e2bd3ac36adee0e53a5970b8ac52c273f7f8fd4872c94c \ - --hash=sha256:230a8f7e24298dea47659251abc0fd8b3c4e38a664c59d4b89cca7f6c09c9e87 \ - --hash=sha256:2e19413bf84934d651344783c9f5e22dee452e251cfd220ebadbed2d9931dbf0 \ - --hash=sha256:393f389841e8f2dfc86f774ad22f00923fdee66d238af89b70ea314c4aefd290 \ - --hash=sha256:3cf75f7cdc2397ed4442594b935a11ed5569961333d49b7539ea741be2cc79d5 \ - --hash=sha256:3d78619672183be860b96ed96f533046ec97ca067fd46ac1f6a09cd9b7484287 \ - --hash=sha256:40eced07f07a9e60e825554a31f923e8d3997cfc7fb31dbc1328c70826e04cde \ - --hash=sha256:493d3299ebe5f5a7c66b9819eacdcfbbaaf1a8e84911ddffcdc48888497afecf \ - 
--hash=sha256:4b302b45040890cea949ad092479e01ba25911a15e648429c7c5aae9650c67a8 \ - --hash=sha256:515dfef7f869a0feb2afee66b957cc7bbe9ad0cdee45aec7fdc623f4ecd4fb16 \ - --hash=sha256:547da6cacac20666422d4882cfcd51298d45f7ccb60a04ec27424d2f36ba3eaf \ - --hash=sha256:5df68496d19f849921f05f14f31bd6ef53ad4b00245da3195048c69934521809 \ - --hash=sha256:64322071e046020e8797117b3658b9c2f80e3267daec409b350b6a7a05041213 \ - --hash=sha256:7615dab56bb07bff74bc865307aeb89a8bfd9941d2ef9d817b9436da3a0ea54f \ - --hash=sha256:79ebfc238612123a713a457d92afb4096e2148be17df6c50fb9bf7a81c2f8013 \ - --hash=sha256:7b18b97cf8ee5452fa5f4e3af95d01d84d86d32c5e2bfa260cf041749d66360b \ - --hash=sha256:932bb1ea39a54e9ea27fc9232163059a0b8855256f4052e776357ad9add6f1c9 \ - --hash=sha256:a00bb73540af068ca7390e636c01cbc4f644961896fa9363154ff43fd37af2f5 \ - --hash=sha256:a5ca29ee66f8343ed336816c553e82d6cade48a3ad702b9ffa6125d187e2dedb \ - --hash=sha256:af9aa9ef5ba1fd5b8c948bb11f44891968ab30356d65fd0cc6707d989cd521df \ - --hash=sha256:bb437315738aa441251214dad17428cafda9cdc9729499f1d6001748e1d432f4 \ - --hash=sha256:bdb230b4943891321e06fc7def63c7aace16095be7d9cf3b1e01be2f10fba439 \ - --hash=sha256:c6e9dcb4cb338d91a73f178d866d051efe7c62a7166653a91e7d9fb18274058f \ - --hash=sha256:cffe3ab27871bc3ea47df5d8f7013945712c46a3cc5a95b6bee15887f1675c22 \ - --hash=sha256:d012ad7911653a906425d8473a1465caa9f8dea7fcf07b6d870397b774ea7c0f \ - --hash=sha256:d9e13b33afd39ddeb377eff2c1c4f00544e191e1d1dee5b6c51ddee8ea6f0cf5 \ - --hash=sha256:e4b2b334e68b18ac9817d828ba44d8fcb391f6acb398bcc5062b14b2cbeac970 \ - --hash=sha256:e54962802d4b8b18b6207d4a927032826af39395a3bd9196a5af43fc4e60b009 \ - --hash=sha256:f705e12750171c0ab4ef2a3c76b9a4024a62c4103e3a55dd6f99265b9bc6fcfc \ - --hash=sha256:f881853d2643a29e643609da57b96d5f9c9b93f62429dcc1cbb413c7d07f0e1a \ - --hash=sha256:fe60131d21b31fd1a14bd43e6bb88256f69dfc3188b3a89d736d6c71ed43ec95 - # via - # aio.stream - # aiodocker - # envoy.github.abstract - # envoy.github.release 
-async-timeout==3.0.1 \ - --hash=sha256:0c3c816a028d47f659d6ff5c745cb2acf1f966da1fe5c19c77a70282b25f4c5f \ - --hash=sha256:4291ca197d287d274d0b6cb5d6f8f8f82d434ed288f962539ff18cc9012f9ea3 - # via aiohttp -attrs==21.2.0 \ - --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ - --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb - # via aiohttp -cffi==1.14.6 \ - --hash=sha256:06c54a68935738d206570b20da5ef2b6b6d92b38ef3ec45c5422c0ebaf338d4d \ - --hash=sha256:0c0591bee64e438883b0c92a7bed78f6290d40bf02e54c5bf0978eaf36061771 \ - --hash=sha256:19ca0dbdeda3b2615421d54bef8985f72af6e0c47082a8d26122adac81a95872 \ - --hash=sha256:22b9c3c320171c108e903d61a3723b51e37aaa8c81255b5e7ce102775bd01e2c \ - --hash=sha256:26bb2549b72708c833f5abe62b756176022a7b9a7f689b571e74c8478ead51dc \ - --hash=sha256:33791e8a2dc2953f28b8d8d300dde42dd929ac28f974c4b4c6272cb2955cb762 \ - --hash=sha256:3c8d896becff2fa653dc4438b54a5a25a971d1f4110b32bd3068db3722c80202 \ - --hash=sha256:4373612d59c404baeb7cbd788a18b2b2a8331abcc84c3ba40051fcd18b17a4d5 \ - --hash=sha256:487d63e1454627c8e47dd230025780e91869cfba4c753a74fda196a1f6ad6548 \ - --hash=sha256:48916e459c54c4a70e52745639f1db524542140433599e13911b2f329834276a \ - --hash=sha256:4922cd707b25e623b902c86188aca466d3620892db76c0bdd7b99a3d5e61d35f \ - --hash=sha256:55af55e32ae468e9946f741a5d51f9896da6b9bf0bbdd326843fec05c730eb20 \ - --hash=sha256:57e555a9feb4a8460415f1aac331a2dc833b1115284f7ded7278b54afc5bd218 \ - --hash=sha256:5d4b68e216fc65e9fe4f524c177b54964af043dde734807586cf5435af84045c \ - --hash=sha256:64fda793737bc4037521d4899be780534b9aea552eb673b9833b01f945904c2e \ - --hash=sha256:6d6169cb3c6c2ad50db5b868db6491a790300ade1ed5d1da29289d73bbe40b56 \ - --hash=sha256:7bcac9a2b4fdbed2c16fa5681356d7121ecabf041f18d97ed5b8e0dd38a80224 \ - --hash=sha256:80b06212075346b5546b0417b9f2bf467fea3bfe7352f781ffc05a8ab24ba14a \ - --hash=sha256:818014c754cd3dba7229c0f5884396264d51ffb87ec86e927ef0be140bfdb0d2 \ - 
--hash=sha256:8eb687582ed7cd8c4bdbff3df6c0da443eb89c3c72e6e5dcdd9c81729712791a \ - --hash=sha256:99f27fefe34c37ba9875f224a8f36e31d744d8083e00f520f133cab79ad5e819 \ - --hash=sha256:9f3e33c28cd39d1b655ed1ba7247133b6f7fc16fa16887b120c0c670e35ce346 \ - --hash=sha256:a8661b2ce9694ca01c529bfa204dbb144b275a31685a075ce123f12331be790b \ - --hash=sha256:a9da7010cec5a12193d1af9872a00888f396aba3dc79186604a09ea3ee7c029e \ - --hash=sha256:aedb15f0a5a5949ecb129a82b72b19df97bbbca024081ed2ef88bd5c0a610534 \ - --hash=sha256:b315d709717a99f4b27b59b021e6207c64620790ca3e0bde636a6c7f14618abb \ - --hash=sha256:ba6f2b3f452e150945d58f4badd92310449876c4c954836cfb1803bdd7b422f0 \ - --hash=sha256:c33d18eb6e6bc36f09d793c0dc58b0211fccc6ae5149b808da4a62660678b156 \ - --hash=sha256:c9a875ce9d7fe32887784274dd533c57909b7b1dcadcc128a2ac21331a9765dd \ - --hash=sha256:c9e005e9bd57bc987764c32a1bee4364c44fdc11a3cc20a40b93b444984f2b87 \ - --hash=sha256:d2ad4d668a5c0645d281dcd17aff2be3212bc109b33814bbb15c4939f44181cc \ - --hash=sha256:d950695ae4381ecd856bcaf2b1e866720e4ab9a1498cba61c602e56630ca7195 \ - --hash=sha256:e22dcb48709fc51a7b58a927391b23ab37eb3737a98ac4338e2448bef8559b33 \ - --hash=sha256:e8c6a99be100371dbb046880e7a282152aa5d6127ae01783e37662ef73850d8f \ - --hash=sha256:e9dc245e3ac69c92ee4c167fbdd7428ec1956d4e754223124991ef29eb57a09d \ - --hash=sha256:eb687a11f0a7a1839719edd80f41e459cc5366857ecbed383ff376c4e3cc6afd \ - --hash=sha256:eb9e2a346c5238a30a746893f23a9535e700f8192a68c07c0258e7ece6ff3728 \ - --hash=sha256:ed38b924ce794e505647f7c331b22a693bee1538fdf46b0222c4717b42f744e7 \ - --hash=sha256:f0010c6f9d1a4011e429109fda55a225921e3206e7f62a0c22a35344bfd13cca \ - --hash=sha256:f0c5d1acbfca6ebdd6b1e3eded8d261affb6ddcf2186205518f1428b8569bb99 \ - --hash=sha256:f10afb1004f102c7868ebfe91c28f4a712227fe4cb24974350ace1f90e1febbf \ - --hash=sha256:f174135f5609428cc6e1b9090f9268f5c8935fddb1b25ccb8255a2d50de6789e \ - --hash=sha256:f3ebe6e73c319340830a9b2825d32eb6d8475c1dac020b4f0aa774ee3b898d1c \ - 
--hash=sha256:f627688813d0a4140153ff532537fbe4afea5a3dffce1f9deb7f91f848a832b5 \ - --hash=sha256:fd4305f86f53dfd8cd3522269ed7fc34856a8ee3709a5e28b2836b2db9d4cd69 - # via cryptography -chardet==4.0.0 \ - --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ - --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 - # via aiohttp -coloredlogs==15.0.1 \ - --hash=sha256:612ee75c546f53e92e70049c9dbfcc18c935a2b9a53b66085ce9ef6a6e5c0934 \ - --hash=sha256:7c991aa71a4577af2f82600d8f8f3a89f936baeaf9b50a9c197da014e5bf16b0 - # via envoy.base.runner -cryptography==3.4.8 \ - --hash=sha256:0a7dcbcd3f1913f664aca35d47c1331fce738d44ec34b7be8b9d332151b0b01e \ - --hash=sha256:1eb7bb0df6f6f583dd8e054689def236255161ebbcf62b226454ab9ec663746b \ - --hash=sha256:21ca464b3a4b8d8e86ba0ee5045e103a1fcfac3b39319727bc0fc58c09c6aff7 \ - --hash=sha256:34dae04a0dce5730d8eb7894eab617d8a70d0c97da76b905de9efb7128ad7085 \ - --hash=sha256:3520667fda779eb788ea00080124875be18f2d8f0848ec00733c0ec3bb8219fc \ - --hash=sha256:3fa3a7ccf96e826affdf1a0a9432be74dc73423125c8f96a909e3835a5ef194a \ - --hash=sha256:5b0fbfae7ff7febdb74b574055c7466da334a5371f253732d7e2e7525d570498 \ - --hash=sha256:8695456444f277af73a4877db9fc979849cd3ee74c198d04fc0776ebc3db52b9 \ - --hash=sha256:94cc5ed4ceaefcbe5bf38c8fba6a21fc1d365bb8fb826ea1688e3370b2e24a1c \ - --hash=sha256:94fff993ee9bc1b2440d3b7243d488c6a3d9724cc2b09cdb297f6a886d040ef7 \ - --hash=sha256:9965c46c674ba8cc572bc09a03f4c649292ee73e1b683adb1ce81e82e9a6a0fb \ - --hash=sha256:a00cf305f07b26c351d8d4e1af84ad7501eca8a342dedf24a7acb0e7b7406e14 \ - --hash=sha256:a305600e7a6b7b855cd798e00278161b681ad6e9b7eca94c721d5f588ab212af \ - --hash=sha256:cd65b60cfe004790c795cc35f272e41a3df4631e2fb6b35aa7ac6ef2859d554e \ - --hash=sha256:d2a6e5ef66503da51d2110edf6c403dc6b494cc0082f85db12f54e9c5d4c3ec5 \ - --hash=sha256:d9ec0e67a14f9d1d48dd87a2531009a9b251c02ea42851c060b25c782516ff06 \ - 
--hash=sha256:f44d141b8c4ea5eb4dbc9b3ad992d45580c1d22bf5e24363f2fbf50c2d7ae8a7 - # via pyjwt -envoy.abstract.command==0.0.3 \ - --hash=sha256:4b7b15c91bea1f2eb7c2e8e35f95cd9437e1c8f151adc093bf7858fc85d48221 - # via - # envoy.base.runner - # envoy.distribution.release -envoy.base.checker==0.0.2 \ - --hash=sha256:2ac81efa20fd01fff644ff7dc7fadeac1c3e4dbb6210881ac7a7919ec0e048d8 - # via - # envoy.distribution.distrotest - # envoy.distribution.verify -envoy.base.runner==0.0.4 \ - --hash=sha256:4eeb2b661f1f0c402df4425852be554a8a83ef5d338bfae69ddcb9b90755379e - # via - # envoy.base.checker - # envoy.distribution.release - # envoy.github.abstract - # envoy.gpg.sign -envoy.base.utils==0.0.6 \ - --hash=sha256:58ed057137ebe80d78db90997efc59822115ee616e435a9afc3d7a19069bb53c - # via - # envoy.distribution.distrotest - # envoy.github.release - # envoy.gpg.sign -envoy.distribution.distrotest==0.0.3 \ - --hash=sha256:c094adbd959eb1336f93afc00aedb7ee4e68e8252e2365be816a6f9ede8a3de7 - # via envoy.distribution.verify -envoy.distribution.release==0.0.4 \ - --hash=sha256:41037e0488f0593ce5173739fe0cd1b45a4775f5a47738b85d9d04024ca241a2 - # via -r tools/distribution/requirements.txt -envoy.distribution.verify==0.0.2 \ - --hash=sha256:ae59134085de50203edf51c243dbf3301cbe5550db29f0ec6f9ea1c3b82fee1c - # via -r tools/distribution/requirements.txt -envoy.docker.utils==0.0.2 \ - --hash=sha256:a12cb57f0b6e204d646cbf94f927b3a8f5a27ed15f60d0576176584ec16a4b76 - # via envoy.distribution.distrotest -envoy.github.abstract==0.0.16 \ - --hash=sha256:badf04104492fb6b37ba2163f2b225132ed04aba680beb218e7c7d918564f8ee - # via - # envoy.distribution.release - # envoy.github.release -envoy.github.release==0.0.8 \ - --hash=sha256:fbc4354030137eb565b8c4d679965e4ef60b01de0c09310441836e592ca0cd19 - # via envoy.distribution.release -envoy.gpg.identity==0.0.2 \ - --hash=sha256:7d32ff9133e00b9974b4dabd2512b4872b091b8c5069d0112240dcc1a56bc406 - # via envoy.gpg.sign -envoy.gpg.sign==0.0.3 \ - 
--hash=sha256:31667931f5d7ff05fd809b89748f277511486311c777652af4cb8889bd641049 - # via -r tools/distribution/requirements.txt -frozendict==2.0.6 \ - --hash=sha256:3f00de72805cf4c9e81b334f3f04809278b967d2fed84552313a0fcce511beb1 \ - --hash=sha256:5d3f75832c35d4df041f0e19c268964cbef29c1eb34cd3517cf883f1c2d089b9 - # via envoy.base.runner -gidgethub==5.0.1 \ - --hash=sha256:3efbd6998600254ec7a2869318bd3ffde38edc3a0d37be0c14bc46b45947b682 \ - --hash=sha256:67245e93eb0918b37df038148af675df43b62e832c529d7f859f6b90d9f3e70d - # via - # envoy.github.abstract - # envoy.github.release -humanfriendly==9.2 \ - --hash=sha256:332da98c24cc150efcc91b5508b19115209272bfdf4b0764a56795932f854271 \ - --hash=sha256:f7dba53ac7935fd0b4a2fc9a29e316ddd9ea135fb3052d3d0279d10c18ff9c48 - # via coloredlogs -idna==3.2 \ - --hash=sha256:14475042e284991034cb48e06f6851428fb14c4dc953acd9be9a5e95c7b6dd7a \ - --hash=sha256:467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3 - # via yarl -multidict==5.1.0 \ - --hash=sha256:018132dbd8688c7a69ad89c4a3f39ea2f9f33302ebe567a879da8f4ca73f0d0a \ - --hash=sha256:051012ccee979b2b06be928a6150d237aec75dd6bf2d1eeeb190baf2b05abc93 \ - --hash=sha256:05c20b68e512166fddba59a918773ba002fdd77800cad9f55b59790030bab632 \ - --hash=sha256:07b42215124aedecc6083f1ce6b7e5ec5b50047afa701f3442054373a6deb656 \ - --hash=sha256:0e3c84e6c67eba89c2dbcee08504ba8644ab4284863452450520dad8f1e89b79 \ - --hash=sha256:0e929169f9c090dae0646a011c8b058e5e5fb391466016b39d21745b48817fd7 \ - --hash=sha256:1ab820665e67373de5802acae069a6a05567ae234ddb129f31d290fc3d1aa56d \ - --hash=sha256:25b4e5f22d3a37ddf3effc0710ba692cfc792c2b9edfb9c05aefe823256e84d5 \ - --hash=sha256:2e68965192c4ea61fff1b81c14ff712fc7dc15d2bd120602e4a3494ea6584224 \ - --hash=sha256:2f1a132f1c88724674271d636e6b7351477c27722f2ed789f719f9e3545a3d26 \ - --hash=sha256:37e5438e1c78931df5d3c0c78ae049092877e5e9c02dd1ff5abb9cf27a5914ea \ - --hash=sha256:3a041b76d13706b7fff23b9fc83117c7b8fe8d5fe9e6be45eee72b9baa75f348 \ - 
--hash=sha256:3a4f32116f8f72ecf2a29dabfb27b23ab7cdc0ba807e8459e59a93a9be9506f6 \ - --hash=sha256:46c73e09ad374a6d876c599f2328161bcd95e280f84d2060cf57991dec5cfe76 \ - --hash=sha256:46dd362c2f045095c920162e9307de5ffd0a1bfbba0a6e990b344366f55a30c1 \ - --hash=sha256:4b186eb7d6ae7c06eb4392411189469e6a820da81447f46c0072a41c748ab73f \ - --hash=sha256:54fd1e83a184e19c598d5e70ba508196fd0bbdd676ce159feb412a4a6664f952 \ - --hash=sha256:585fd452dd7782130d112f7ddf3473ffdd521414674c33876187e101b588738a \ - --hash=sha256:5cf3443199b83ed9e955f511b5b241fd3ae004e3cb81c58ec10f4fe47c7dce37 \ - --hash=sha256:6a4d5ce640e37b0efcc8441caeea8f43a06addace2335bd11151bc02d2ee31f9 \ - --hash=sha256:7df80d07818b385f3129180369079bd6934cf70469f99daaebfac89dca288359 \ - --hash=sha256:806068d4f86cb06af37cd65821554f98240a19ce646d3cd24e1c33587f313eb8 \ - --hash=sha256:830f57206cc96ed0ccf68304141fec9481a096c4d2e2831f311bde1c404401da \ - --hash=sha256:929006d3c2d923788ba153ad0de8ed2e5ed39fdbe8e7be21e2f22ed06c6783d3 \ - --hash=sha256:9436dc58c123f07b230383083855593550c4d301d2532045a17ccf6eca505f6d \ - --hash=sha256:9dd6e9b1a913d096ac95d0399bd737e00f2af1e1594a787e00f7975778c8b2bf \ - --hash=sha256:ace010325c787c378afd7f7c1ac66b26313b3344628652eacd149bdd23c68841 \ - --hash=sha256:b47a43177a5e65b771b80db71e7be76c0ba23cc8aa73eeeb089ed5219cdbe27d \ - --hash=sha256:b797515be8743b771aa868f83563f789bbd4b236659ba52243b735d80b29ed93 \ - --hash=sha256:b7993704f1a4b204e71debe6095150d43b2ee6150fa4f44d6d966ec356a8d61f \ - --hash=sha256:d5c65bdf4484872c4af3150aeebe101ba560dcfb34488d9a8ff8dbcd21079647 \ - --hash=sha256:d81eddcb12d608cc08081fa88d046c78afb1bf8107e6feab5d43503fea74a635 \ - --hash=sha256:dc862056f76443a0db4509116c5cd480fe1b6a2d45512a653f9a855cc0517456 \ - --hash=sha256:ecc771ab628ea281517e24fd2c52e8f31c41e66652d07599ad8818abaad38cda \ - --hash=sha256:f200755768dc19c6f4e2b672421e0ebb3dd54c38d5a4f262b872d8cfcc9e93b5 \ - --hash=sha256:f21756997ad8ef815d8ef3d34edd98804ab5ea337feedcd62fb52d22bf531281 \ - 
--hash=sha256:fc13a9524bc18b6fb6e0dbec3533ba0496bbed167c56d0aabefd965584557d80 - # via - # aiohttp - # yarl -packaging==21.0 \ - --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ - --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 - # via envoy.github.release -pycparser==2.20 \ - --hash=sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0 \ - --hash=sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705 - # via cffi -pyjwt[crypto]==2.1.0 \ - --hash=sha256:934d73fbba91b0483d3857d1aff50e96b2a892384ee2c17417ed3203f173fca1 \ - --hash=sha256:fba44e7898bbca160a2b2b501f492824fc8382485d3a6f11ba5d0c1937ce6130 - # via gidgethub -pyparsing==2.4.7 \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b - # via packaging -python-gnupg==0.4.7 \ - --hash=sha256:2061f56b1942c29b92727bf9aecbd3cea3893acc9cccbdc7eb4604285efe4ac7 \ - --hash=sha256:3ff5b1bf5e397de6e1fe41a7c0f403dad4e242ac92b345f440eaecfb72a7ebae - # via envoy.gpg.identity -pyyaml==5.4.1 \ - --hash=sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf \ - --hash=sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696 \ - --hash=sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393 \ - --hash=sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77 \ - --hash=sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922 \ - --hash=sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5 \ - --hash=sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8 \ - --hash=sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10 \ - --hash=sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc \ - --hash=sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018 \ - 
--hash=sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e \ - --hash=sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253 \ - --hash=sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347 \ - --hash=sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183 \ - --hash=sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541 \ - --hash=sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb \ - --hash=sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185 \ - --hash=sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc \ - --hash=sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db \ - --hash=sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa \ - --hash=sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46 \ - --hash=sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122 \ - --hash=sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b \ - --hash=sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63 \ - --hash=sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df \ - --hash=sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc \ - --hash=sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247 \ - --hash=sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6 \ - --hash=sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0 - # via envoy.base.utils -trycast==0.3.0 \ - --hash=sha256:1b7b4c0d4b0d674770a53f34a762e52a6cd6879eb251ab21625602699920080d \ - --hash=sha256:687185b812e8d1c45f2ba841e8de7bdcdee0695dcf3464f206800505d4c65f26 - # via envoy.base.utils -typing-extensions==3.10.0.2 \ - --hash=sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e \ - --hash=sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7 
\ - --hash=sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34 - # via - # aiodocker - # aiohttp -uritemplate==3.0.1 \ - --hash=sha256:07620c3f3f8eed1f12600845892b0e036a2420acf513c53f7de0abd911a5894f \ - --hash=sha256:5af8ad10cec94f215e3f48112de2022e1d5a37ed427fbd88652fa908f2ab7cae - # via gidgethub -verboselogs==1.7 \ - --hash=sha256:d63f23bf568295b95d3530c6864a0b580cec70e7ff974177dead1e4ffbc6ff49 \ - --hash=sha256:e33ddedcdfdafcb3a174701150430b11b46ceb64c2a9a26198c76a156568e427 - # via - # envoy.base.runner - # envoy.github.abstract - # envoy.github.release -yarl==1.6.3 \ - --hash=sha256:00d7ad91b6583602eb9c1d085a2cf281ada267e9a197e8b7cae487dadbfa293e \ - --hash=sha256:0355a701b3998dcd832d0dc47cc5dedf3874f966ac7f870e0f3a6788d802d434 \ - --hash=sha256:15263c3b0b47968c1d90daa89f21fcc889bb4b1aac5555580d74565de6836366 \ - --hash=sha256:2ce4c621d21326a4a5500c25031e102af589edb50c09b321049e388b3934eec3 \ - --hash=sha256:31ede6e8c4329fb81c86706ba8f6bf661a924b53ba191b27aa5fcee5714d18ec \ - --hash=sha256:324ba3d3c6fee56e2e0b0d09bf5c73824b9f08234339d2b788af65e60040c959 \ - --hash=sha256:329412812ecfc94a57cd37c9d547579510a9e83c516bc069470db5f75684629e \ - --hash=sha256:4736eaee5626db8d9cda9eb5282028cc834e2aeb194e0d8b50217d707e98bb5c \ - --hash=sha256:4953fb0b4fdb7e08b2f3b3be80a00d28c5c8a2056bb066169de00e6501b986b6 \ - --hash=sha256:4c5bcfc3ed226bf6419f7a33982fb4b8ec2e45785a0561eb99274ebbf09fdd6a \ - --hash=sha256:547f7665ad50fa8563150ed079f8e805e63dd85def6674c97efd78eed6c224a6 \ - --hash=sha256:5b883e458058f8d6099e4420f0cc2567989032b5f34b271c0827de9f1079a424 \ - --hash=sha256:63f90b20ca654b3ecc7a8d62c03ffa46999595f0167d6450fa8383bab252987e \ - --hash=sha256:68dc568889b1c13f1e4745c96b931cc94fdd0defe92a72c2b8ce01091b22e35f \ - --hash=sha256:69ee97c71fee1f63d04c945f56d5d726483c4762845400a6795a3b75d56b6c50 \ - --hash=sha256:6d6283d8e0631b617edf0fd726353cb76630b83a089a40933043894e7f6721e2 \ - 
--hash=sha256:72a660bdd24497e3e84f5519e57a9ee9220b6f3ac4d45056961bf22838ce20cc \ - --hash=sha256:73494d5b71099ae8cb8754f1df131c11d433b387efab7b51849e7e1e851f07a4 \ - --hash=sha256:7356644cbed76119d0b6bd32ffba704d30d747e0c217109d7979a7bc36c4d970 \ - --hash=sha256:8a9066529240171b68893d60dca86a763eae2139dd42f42106b03cf4b426bf10 \ - --hash=sha256:8aa3decd5e0e852dc68335abf5478a518b41bf2ab2f330fe44916399efedfae0 \ - --hash=sha256:97b5bdc450d63c3ba30a127d018b866ea94e65655efaf889ebeabc20f7d12406 \ - --hash=sha256:9ede61b0854e267fd565e7527e2f2eb3ef8858b301319be0604177690e1a3896 \ - --hash=sha256:b2e9a456c121e26d13c29251f8267541bd75e6a1ccf9e859179701c36a078643 \ - --hash=sha256:b5dfc9a40c198334f4f3f55880ecf910adebdcb2a0b9a9c23c9345faa9185721 \ - --hash=sha256:bafb450deef6861815ed579c7a6113a879a6ef58aed4c3a4be54400ae8871478 \ - --hash=sha256:c49ff66d479d38ab863c50f7bb27dee97c6627c5fe60697de15529da9c3de724 \ - --hash=sha256:ce3beb46a72d9f2190f9e1027886bfc513702d748047b548b05dab7dfb584d2e \ - --hash=sha256:d26608cf178efb8faa5ff0f2d2e77c208f471c5a3709e577a7b3fd0445703ac8 \ - --hash=sha256:d597767fcd2c3dc49d6eea360c458b65643d1e4dbed91361cf5e36e53c1f8c96 \ - --hash=sha256:d5c32c82990e4ac4d8150fd7652b972216b204de4e83a122546dce571c1bdf25 \ - --hash=sha256:d8d07d102f17b68966e2de0e07bfd6e139c7c02ef06d3a0f8d2f0f055e13bb76 \ - --hash=sha256:e46fba844f4895b36f4c398c5af062a9808d1f26b2999c58909517384d5deda2 \ - --hash=sha256:e6b5460dc5ad42ad2b36cca524491dfcaffbfd9c8df50508bddc354e787b8dc2 \ - --hash=sha256:f040bcc6725c821a4c0665f3aa96a4d0805a7aaf2caf266d256b8ed71b9f041c \ - --hash=sha256:f0b059678fd549c66b89bed03efcabb009075bd131c248ecdf087bdb6faba24a \ - --hash=sha256:fcbb48a93e8699eae920f8d92f7160c03567b421bc17362a9ffbbd706a816f71 - # via aiohttp diff --git a/tools/docs/BUILD b/tools/docs/BUILD index 56892499d24d5..4f82feb9de76e 100644 --- a/tools/docs/BUILD +++ b/tools/docs/BUILD @@ -1,6 +1,6 @@ load("@rules_python//python:defs.bzl", "py_binary") load("//bazel:envoy_build_system.bzl", 
"envoy_package") -load("@docs_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") load("//tools/base:envoy_python.bzl", "envoy_py_binary") licenses(["notice"]) # Apache 2 @@ -38,40 +38,14 @@ envoy_py_binary( deps = [ "//tools/base:runner", "//tools/base:utils", - requirement("alabaster"), - requirement("Babel"), - requirement("certifi"), - requirement("chardet"), requirement("colorama"), - requirement("docutils"), - requirement("gitdb"), - requirement("GitPython"), - requirement("idna"), - requirement("imagesize"), - requirement("Jinja2"), - requirement("MarkupSafe"), - requirement("packaging"), - requirement("Pygments"), - requirement("pyparsing"), - requirement("pytz"), - requirement("requests"), - requirement("setuptools"), - requirement("six"), - requirement("smmap"), - requirement("snowballstemmer"), requirement("Sphinx"), requirement("sphinx-copybutton"), requirement("sphinx-rtd-theme"), requirement("sphinx-tabs"), - requirement("sphinxcontrib-applehelp"), - requirement("sphinxcontrib-devhelp"), - requirement("sphinxcontrib-htmlhelp"), requirement("sphinxcontrib-httpdomain"), - requirement("sphinxcontrib-jsmath"), - requirement("sphinxcontrib-qthelp"), - requirement("sphinxext-rediraffe"), requirement("sphinxcontrib-serializinghtml"), - requirement("urllib3"), + requirement("sphinxext-rediraffe"), ], ) diff --git a/tools/docs/requirements.txt b/tools/docs/requirements.txt deleted file mode 100644 index 075bb65491822..0000000000000 --- a/tools/docs/requirements.txt +++ /dev/null @@ -1,239 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --allow-unsafe --generate-hashes tools/docs/requirements.txt -# -alabaster==0.7.12 \ - --hash=sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359 \ - --hash=sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02 - # via - # -r tools/docs/requirements.txt - # sphinx -babel==2.9.1 \ - 
--hash=sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9 \ - --hash=sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0 - # via - # -r tools/docs/requirements.txt - # sphinx -certifi==2021.5.30 \ - --hash=sha256:2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee \ - --hash=sha256:50b1e4f8446b06f41be7dd6338db18e0990601dce795c2b1686458aa7e8fa7d8 - # via - # -r tools/docs/requirements.txt - # requests -chardet==4.0.0 \ - --hash=sha256:0d6f53a15db4120f2b08c94f11e7d93d2c911ee118b6b30a04ec3ee8310179fa \ - --hash=sha256:f864054d66fd9118f2e67044ac8981a54775ec5b67aed0441892edb553d21da5 - # via -r tools/docs/requirements.txt -charset-normalizer==2.0.4 \ - --hash=sha256:0c8911edd15d19223366a194a513099a302055a962bca2cec0f54b8b63175d8b \ - --hash=sha256:f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3 - # via requests -colorama==0.4.4 \ - --hash=sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b \ - --hash=sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2 - # via -r tools/docs/requirements.txt -docutils==0.16 \ - --hash=sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af \ - --hash=sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc - # via - # -r tools/docs/requirements.txt - # sphinx - # sphinx-rtd-theme - # sphinx-tabs -gitdb==4.0.7 \ - --hash=sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0 \ - --hash=sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005 - # via - # -r tools/docs/requirements.txt - # gitpython -gitpython==3.1.18 \ - --hash=sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b \ - --hash=sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8 - # via -r tools/docs/requirements.txt -idna==2.10 \ - --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \ - 
--hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 - # via - # -r tools/docs/requirements.txt - # requests -imagesize==1.2.0 \ - --hash=sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1 \ - --hash=sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1 - # via - # -r tools/docs/requirements.txt - # sphinx -jinja2==3.0.1 \ - --hash=sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4 \ - --hash=sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4 - # via - # -r tools/docs/requirements.txt - # sphinx -markupsafe==2.0.1 \ - --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \ - --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \ - --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \ - --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \ - --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \ - --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \ - --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \ - --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \ - --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \ - --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \ - --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \ - --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \ - --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \ - --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a \ - --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \ - --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \ - 
--hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \ - --hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \ - --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \ - --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \ - --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \ - --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \ - --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \ - --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \ - --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \ - --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \ - --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \ - --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \ - --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \ - --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \ - --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \ - --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \ - --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \ - --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872 - # via - # -r tools/docs/requirements.txt - # jinja2 -packaging==21.0 \ - --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ - --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 - # via - # -r tools/docs/requirements.txt - # sphinx -pygments==2.10.0 \ - --hash=sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380 \ - --hash=sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6 - # via - # -r 
tools/docs/requirements.txt - # sphinx - # sphinx-tabs -pyparsing==2.4.7 \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b - # via - # -r tools/docs/requirements.txt - # packaging -pytz==2021.1 \ - --hash=sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da \ - --hash=sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798 - # via - # -r tools/docs/requirements.txt - # babel -requests==2.26.0 \ - --hash=sha256:6c1246513ecd5ecd4528a0906f910e8f0f9c6b8ec72030dc9fd154dc1a6efd24 \ - --hash=sha256:b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7 - # via - # -r tools/docs/requirements.txt - # sphinx -six==1.16.0 \ - --hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \ - --hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254 - # via - # -r tools/docs/requirements.txt - # sphinxcontrib-httpdomain -smmap==4.0.0 \ - --hash=sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182 \ - --hash=sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2 - # via - # -r tools/docs/requirements.txt - # gitdb -snowballstemmer==2.1.0 \ - --hash=sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2 \ - --hash=sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinx-copybutton==0.4.0 \ - --hash=sha256:4340d33c169dac6dd82dce2c83333412aa786a42dd01a81a8decac3b130dc8b0 \ - --hash=sha256:8daed13a87afd5013c3a9af3575cc4d5bec052075ccd3db243f895c07a689386 - # via -r tools/docs/requirements.txt -sphinx-rtd-theme==0.5.2 \ - --hash=sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a \ - --hash=sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f - # via -r tools/docs/requirements.txt -sphinx-tabs==3.2.0 \ - 
--hash=sha256:1e1b1846c80137bd81a78e4a69b02664b98b1e1da361beb30600b939dfc75065 \ - --hash=sha256:33137914ed9b276e6a686d7a337310ee77b1dae316fdcbce60476913a152e0a4 - # via -r tools/docs/requirements.txt -sphinx==4.1.1 \ - --hash=sha256:23c846a1841af998cb736218539bb86d16f5eb95f5760b1966abcd2d584e62b8 \ - --hash=sha256:3d513088236eef51e5b0adb78b0492eb22cc3b8ccdb0b36dd021173b365d4454 - # via - # -r tools/docs/requirements.txt - # sphinx-copybutton - # sphinx-rtd-theme - # sphinx-tabs - # sphinxcontrib-httpdomain - # sphinxext-rediraffe -sphinxcontrib-applehelp==1.0.2 \ - --hash=sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a \ - --hash=sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinxcontrib-devhelp==1.0.2 \ - --hash=sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e \ - --hash=sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinxcontrib-htmlhelp==2.0.0 \ - --hash=sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07 \ - --hash=sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinxcontrib-httpdomain==1.7.0 \ - --hash=sha256:1fb5375007d70bf180cdd1c79e741082be7aa2d37ba99efe561e1c2e3f38191e \ - --hash=sha256:ac40b4fba58c76b073b03931c7b8ead611066a6aebccafb34dc19694f4eb6335 - # via -r tools/docs/requirements.txt -sphinxcontrib-jsmath==1.0.1 \ - --hash=sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178 \ - --hash=sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinxcontrib-qthelp==1.0.3 \ - --hash=sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72 \ - --hash=sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6 - # via - # -r 
tools/docs/requirements.txt - # sphinx -sphinxcontrib-serializinghtml==1.1.5 \ - --hash=sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd \ - --hash=sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952 - # via - # -r tools/docs/requirements.txt - # sphinx -sphinxext-rediraffe==0.2.7 \ - --hash=sha256:651dcbfae5ffda9ffd534dfb8025f36120e5efb6ea1a33f5420023862b9f725d \ - --hash=sha256:9e430a52d4403847f4ffb3a8dd6dfc34a9fe43525305131f52ed899743a5fd8c - # via -r tools/docs/requirements.txt -urllib3==1.26.6 \ - --hash=sha256:39fb8672126159acb139a7718dd10806104dec1e2f0f6c88aab05d17df10c8d4 \ - --hash=sha256:f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f - # via - # -r tools/docs/requirements.txt - # requests - -# The following packages are considered to be unsafe in a requirements file: -setuptools==57.0.0 \ - --hash=sha256:401cbf33a7bf817d08014d51560fc003b895c4cdc1a5b521ad2969e928a07535 \ - --hash=sha256:c8b9f1a457949002e358fea7d3f2a1e1b94ddc0354b2e40afc066bf95d21bf7b - # via - # -r tools/docs/requirements.txt - # sphinx diff --git a/tools/git/BUILD b/tools/git/BUILD index d7baad83ad3f3..46f740a6b6641 100644 --- a/tools/git/BUILD +++ b/tools/git/BUILD @@ -1,4 +1,4 @@ -load("@git_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") load("//bazel:envoy_build_system.bzl", "envoy_package") load("//tools/base:envoy_python.bzl", "envoy_py_library") diff --git a/tools/git/requirements.txt b/tools/git/requirements.txt deleted file mode 100644 index 886a35eec12ab..0000000000000 --- a/tools/git/requirements.txt +++ /dev/null @@ -1,18 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --generate-hashes tools/git/requirements.txt -# -gitdb==4.0.7 \ - --hash=sha256:6c4cc71933456991da20917998acbe6cf4fb41eeaab7d6d67fbc05ecd4c865b0 \ - --hash=sha256:96bf5c08b157a666fec41129e6d327235284cca4c81e92109260f353ba138005 - # via gitpython 
-gitpython==3.1.18 \ - --hash=sha256:b838a895977b45ab6f0cc926a9045c8d1c44e2b653c1fcc39fe91f42c6e8f05b \ - --hash=sha256:fce760879cd2aebd2991b3542876dc5c4a909b30c9d69dfc488e504a8db37ee8 - # via -r tools/git/requirements.txt -smmap==4.0.0 \ - --hash=sha256:7e65386bd122d45405ddf795637b7f7d2b532e7e401d46bbe3fb49b9986d5182 \ - --hash=sha256:a9a7479e4c572e2e775c404dcd3080c8dc49f39918c2cf74913d30c4c478e3c2 - # via gitdb diff --git a/tools/github/BUILD b/tools/github/BUILD index 779d1695d3b7c..ae7eae1cf310d 100644 --- a/tools/github/BUILD +++ b/tools/github/BUILD @@ -1 +1,10 @@ +load("@rules_python//python:defs.bzl", "py_binary") +load("@base_pip3//:requirements.bzl", "requirement") + licenses(["notice"]) # Apache 2 + +py_binary( + name = "sync_assignable", + srcs = ["sync_assignable.py"], + deps = [requirement("pygithub")], +) diff --git a/tools/github/sync_assignable.sh b/tools/github/sync_assignable.sh deleted file mode 100755 index ac11d9ccc3c86..0000000000000 --- a/tools/github/sync_assignable.sh +++ /dev/null @@ -1,7 +0,0 @@ -#!/bin/bash - -. 
tools/shell_utils.sh - -set -e - -python_venv sync_assignable diff --git a/tools/protodoc/BUILD b/tools/protodoc/BUILD index 0636bd08fbc75..4eb0e6f5cf9ec 100644 --- a/tools/protodoc/BUILD +++ b/tools/protodoc/BUILD @@ -1,5 +1,5 @@ load("@rules_python//python:defs.bzl", "py_binary") -load("@protodoc_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") load("//bazel:envoy_build_system.bzl", "envoy_package", "envoy_proto_library") load("//tools/protodoc:protodoc.bzl", "protodoc_rule") diff --git a/tools/protodoc/requirements.txt b/tools/protodoc/requirements.txt deleted file mode 100644 index 1cd69909b9962..0000000000000 --- a/tools/protodoc/requirements.txt +++ /dev/null @@ -1,38 +0,0 @@ -Jinja2==3.0.1 \ - --hash=sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4 \ - --hash=sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4 -MarkupSafe==2.0.1 \ - --hash=sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51 \ - --hash=sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff \ - --hash=sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b \ - --hash=sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94 \ - --hash=sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872 \ - --hash=sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f \ - --hash=sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d \ - --hash=sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9 \ - --hash=sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567 \ - --hash=sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18 \ - --hash=sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f \ - --hash=sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f \ - 
--hash=sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2 \ - --hash=sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d \ - --hash=sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415 \ - --hash=sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914 \ - --hash=sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066 \ - --hash=sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35 \ - --hash=sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b \ - --hash=sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298 \ - --hash=sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75 \ - --hash=sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb \ - --hash=sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64 \ - --hash=sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833 \ - --hash=sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26 \ - --hash=sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7 \ - --hash=sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8 \ - --hash=sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5 \ - --hash=sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135 \ - --hash=sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902 \ - --hash=sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509 \ - --hash=sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74 \ - --hash=sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8 \ - --hash=sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a diff --git a/tools/testing/BUILD b/tools/testing/BUILD index a740e89ad5e32..35b9cf843a286 100644 --- a/tools/testing/BUILD +++ b/tools/testing/BUILD @@ -1,5 +1,5 @@ 
load("@rules_python//python:defs.bzl", "py_library") -load("@testing_pip3//:requirements.bzl", "requirement") +load("@base_pip3//:requirements.bzl", "requirement") load("//bazel:envoy_build_system.bzl", "envoy_package") load("//tools/base:envoy_python.bzl", "envoy_py_binary") diff --git a/tools/testing/requirements.txt b/tools/testing/requirements.txt deleted file mode 100644 index 0c4dc20def2bb..0000000000000 --- a/tools/testing/requirements.txt +++ /dev/null @@ -1,124 +0,0 @@ -# -# This file is autogenerated by pip-compile -# To update, run: -# -# pip-compile --generate-hashes tools/testing/requirements.txt -# -attrs==21.2.0 \ - --hash=sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1 \ - --hash=sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb - # via - # -r tools/testing/requirements.txt - # pytest -coverage==5.5 \ - --hash=sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c \ - --hash=sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6 \ - --hash=sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45 \ - --hash=sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a \ - --hash=sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03 \ - --hash=sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529 \ - --hash=sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a \ - --hash=sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a \ - --hash=sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2 \ - --hash=sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6 \ - --hash=sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759 \ - --hash=sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53 \ - --hash=sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a \ - 
--hash=sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4 \ - --hash=sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff \ - --hash=sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502 \ - --hash=sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793 \ - --hash=sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb \ - --hash=sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905 \ - --hash=sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821 \ - --hash=sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b \ - --hash=sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81 \ - --hash=sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0 \ - --hash=sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b \ - --hash=sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3 \ - --hash=sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184 \ - --hash=sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701 \ - --hash=sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a \ - --hash=sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82 \ - --hash=sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638 \ - --hash=sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5 \ - --hash=sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083 \ - --hash=sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6 \ - --hash=sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90 \ - --hash=sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465 \ - --hash=sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a \ - --hash=sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3 \ - 
--hash=sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e \ - --hash=sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066 \ - --hash=sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf \ - --hash=sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b \ - --hash=sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae \ - --hash=sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669 \ - --hash=sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873 \ - --hash=sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b \ - --hash=sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6 \ - --hash=sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb \ - --hash=sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160 \ - --hash=sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c \ - --hash=sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079 \ - --hash=sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d \ - --hash=sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6 - # via - # -r tools/testing/requirements.txt - # pytest-cov -iniconfig==1.1.1 \ - --hash=sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3 \ - --hash=sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32 - # via - # -r tools/testing/requirements.txt - # pytest -packaging==21.0 \ - --hash=sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7 \ - --hash=sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14 - # via - # -r tools/testing/requirements.txt - # pytest -pluggy==1.0.0 \ - --hash=sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3 \ - --hash=sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159 - # via - # -r 
tools/testing/requirements.txt - # pytest -py==1.10.0 \ - --hash=sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3 \ - --hash=sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a - # via - # -r tools/testing/requirements.txt - # pytest -pyparsing==2.4.7 \ - --hash=sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1 \ - --hash=sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b - # via - # -r tools/testing/requirements.txt - # packaging -pytest-asyncio==0.15.1 \ - --hash=sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f \ - --hash=sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea - # via -r tools/testing/requirements.txt -pytest-cov==2.12.1 \ - --hash=sha256:261bb9e47e65bd099c89c3edf92972865210c36813f80ede5277dceb77a4a62a \ - --hash=sha256:261ceeb8c227b726249b376b8526b600f38667ee314f910353fa318caa01f4d7 - # via -r tools/testing/requirements.txt -pytest-patches==0.0.3 \ - --hash=sha256:6f8cdc8641c708c4812f58ae48d410f373a6fd16cd6cc4dc4d3fb8951df9c92a - # via -r tools/testing/requirements.txt -pytest==6.2.5 \ - --hash=sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134 \ - --hash=sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89 - # via - # -r tools/testing/requirements.txt - # pytest-asyncio - # pytest-cov - # pytest-patches -toml==0.10.2 \ - --hash=sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b \ - --hash=sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f - # via - # -r tools/testing/requirements.txt - # pytest - # pytest-cov