diff --git a/CODEOWNERS b/CODEOWNERS index af0e28fdc3cf3..de1f3f5afe3ee 100644 --- a/CODEOWNERS +++ b/CODEOWNERS @@ -18,6 +18,8 @@ extensions/filters/common/original_src @snowp @klarose /*/extensions/filters/network/dubbo_proxy @zyfjeff @lizan # thrift_proxy extension /*/extensions/filters/network/thrift_proxy @zuercher @brian-pane +# compressor used by http compression filters +/*/extensions/filters/http/common/compressor @gsagula @rojkov @dio # jwt_authn http filter extension /*/extensions/filters/http/jwt_authn @qiwzhang @lizan # grpc_http1_reverse_bridge http filter extension diff --git a/api/BUILD b/api/BUILD index 17520935be06a..9edcd4e60b270 100644 --- a/api/BUILD +++ b/api/BUILD @@ -31,6 +31,7 @@ proto_library( "//envoy/config/filter/http/aws_request_signing/v2alpha:pkg", "//envoy/config/filter/http/buffer/v2:pkg", "//envoy/config/filter/http/cache/v2alpha:pkg", + "//envoy/config/filter/http/compressor/v2:pkg", "//envoy/config/filter/http/cors/v2:pkg", "//envoy/config/filter/http/csrf/v2:pkg", "//envoy/config/filter/http/dynamic_forward_proxy/v2alpha:pkg", @@ -161,6 +162,7 @@ proto_library( "//envoy/extensions/filters/http/aws_request_signing/v3:pkg", "//envoy/extensions/filters/http/buffer/v3:pkg", "//envoy/extensions/filters/http/cache/v3alpha:pkg", + "//envoy/extensions/filters/http/compressor/v3:pkg", "//envoy/extensions/filters/http/cors/v3:pkg", "//envoy/extensions/filters/http/csrf/v3:pkg", "//envoy/extensions/filters/http/dynamic_forward_proxy/v3:pkg", diff --git a/api/docs/BUILD b/api/docs/BUILD index da6e0524aad9f..b929f06b209fd 100644 --- a/api/docs/BUILD +++ b/api/docs/BUILD @@ -37,6 +37,7 @@ proto_library( "//envoy/config/filter/http/aws_request_signing/v2alpha:pkg", "//envoy/config/filter/http/buffer/v2:pkg", "//envoy/config/filter/http/cache/v2alpha:pkg", + "//envoy/config/filter/http/compressor/v2:pkg", "//envoy/config/filter/http/cors/v2:pkg", "//envoy/config/filter/http/csrf/v2:pkg", 
"//envoy/config/filter/http/dynamic_forward_proxy/v2alpha:pkg", diff --git a/api/envoy/config/filter/http/compressor/v2/BUILD b/api/envoy/config/filter/http/compressor/v2/BUILD new file mode 100644 index 0000000000000..69168ad0cf246 --- /dev/null +++ b/api/envoy/config/filter/http/compressor/v2/BUILD @@ -0,0 +1,12 @@ +# DO NOT EDIT. This file is generated by tools/proto_sync.py. + +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = [ + "//envoy/api/v2/core:pkg", + "@com_github_cncf_udpa//udpa/annotations:pkg", + ], +) diff --git a/api/envoy/config/filter/http/compressor/v2/compressor.proto b/api/envoy/config/filter/http/compressor/v2/compressor.proto new file mode 100644 index 0000000000000..54814f9820737 --- /dev/null +++ b/api/envoy/config/filter/http/compressor/v2/compressor.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; + +package envoy.config.filter.http.compressor.v2; + +import "envoy/api/v2/core/base.proto"; + +import "google/protobuf/wrappers.proto"; + +import "udpa/annotations/migrate.proto"; + +option java_package = "io.envoyproxy.envoy.config.filter.http.compressor.v2"; +option java_outer_classname = "CompressorProto"; +option java_multiple_files = true; +option (udpa.annotations.file_migrate).move_to_package = + "envoy.extensions.filters.http.compressor.v3"; + +// [#protodoc-title: Compressor] + +// [#next-free-field: 6] +message Compressor { + // Minimum response length, in bytes, which will trigger compression. The default value is 30. + google.protobuf.UInt32Value content_length = 1; + + // Set of strings that allows specifying which mime-types yield compression; e.g., + // application/json, text/html, etc. When this field is not defined, compression will be applied + // to the following mime-types: "application/javascript", "application/json", + // "application/xhtml+xml", "image/svg+xml", "text/css", "text/html", "text/plain", "text/xml" + // and their synonyms. 
+ repeated string content_type = 2; + + // If true, disables compression when the response contains an etag header. When it is false, the + // filter will preserve weak etags and remove the ones that require strong validation. + bool disable_on_etag_header = 3; + + // If true, removes accept-encoding from the request headers before dispatching it to the upstream + // so that responses do not get compressed before reaching the filter. + // .. attention: + // + // To avoid interfering with other compression filters in the same chain use this option in + // the filter closest to the upstream. + bool remove_accept_encoding_header = 4; + + // Runtime flag that controls whether the filter is enabled or not. If set to false, the + // filter will operate as a pass-through filter. If not specified, defaults to enabled. + api.v2.core.RuntimeFeatureFlag runtime_enabled = 5; +} diff --git a/api/envoy/extensions/filters/http/compressor/v3/BUILD b/api/envoy/extensions/filters/http/compressor/v3/BUILD new file mode 100644 index 0000000000000..8dc07faa8f9f9 --- /dev/null +++ b/api/envoy/extensions/filters/http/compressor/v3/BUILD @@ -0,0 +1,13 @@ +# DO NOT EDIT. This file is generated by tools/proto_sync.py. 
+ +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = [ + "//envoy/config/core/v3:pkg", + "//envoy/config/filter/http/compressor/v2:pkg", + "@com_github_cncf_udpa//udpa/annotations:pkg", + ], +) diff --git a/api/envoy/extensions/filters/http/compressor/v3/compressor.proto b/api/envoy/extensions/filters/http/compressor/v3/compressor.proto new file mode 100644 index 0000000000000..7d506a2152afa --- /dev/null +++ b/api/envoy/extensions/filters/http/compressor/v3/compressor.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; + +package envoy.extensions.filters.http.compressor.v3; + +import "envoy/config/core/v3/base.proto"; + +import "google/protobuf/wrappers.proto"; + +import "udpa/annotations/versioning.proto"; + +option java_package = "io.envoyproxy.envoy.extensions.filters.http.compressor.v3"; +option java_outer_classname = "CompressorProto"; +option java_multiple_files = true; + +// [#protodoc-title: Compressor] + +// [#next-free-field: 6] +message Compressor { + option (udpa.annotations.versioning).previous_message_type = + "envoy.config.filter.http.compressor.v2.Compressor"; + + // Minimum response length, in bytes, which will trigger compression. The default value is 30. + google.protobuf.UInt32Value content_length = 1; + + // Set of strings that allows specifying which mime-types yield compression; e.g., + // application/json, text/html, etc. When this field is not defined, compression will be applied + // to the following mime-types: "application/javascript", "application/json", + // "application/xhtml+xml", "image/svg+xml", "text/css", "text/html", "text/plain", "text/xml" + // and their synonyms. + repeated string content_type = 2; + + // If true, disables compression when the response contains an etag header. When it is false, the + // filter will preserve weak etags and remove the ones that require strong validation. 
+ bool disable_on_etag_header = 3; + + // If true, removes accept-encoding from the request headers before dispatching it to the upstream + // so that responses do not get compressed before reaching the filter. + // .. attention: + // + // To avoid interfering with other compression filters in the same chain use this option in + // the filter closest to the upstream. + bool remove_accept_encoding_header = 4; + + // Runtime flag that controls whether the filter is enabled or not. If set to false, the + // filter will operate as a pass-through filter. If not specified, defaults to enabled. + config.core.v3.RuntimeFeatureFlag runtime_enabled = 5; +} diff --git a/generated_api_shadow/BUILD b/generated_api_shadow/BUILD index 1277007a662e7..a028250022dd9 100644 --- a/generated_api_shadow/BUILD +++ b/generated_api_shadow/BUILD @@ -35,6 +35,7 @@ proto_library( "//envoy/config/filter/fault/v2:pkg", "//envoy/config/filter/http/adaptive_concurrency/v2alpha:pkg", "//envoy/config/filter/http/buffer/v2:pkg", + "//envoy/config/filter/http/compressor/v2:pkg", "//envoy/config/filter/http/cors/v2:pkg", "//envoy/config/filter/http/csrf/v2:pkg", "//envoy/config/filter/http/dynamic_forward_proxy/v2alpha:pkg", @@ -129,6 +130,7 @@ proto_library( "//envoy/extensions/filters/common/fault/v3:pkg", "//envoy/extensions/filters/http/adaptive_concurrency/v3:pkg", "//envoy/extensions/filters/http/buffer/v3:pkg", + "//envoy/extensions/filters/http/compressor/v3:pkg", "//envoy/extensions/filters/http/cors/v3:pkg", "//envoy/extensions/filters/http/csrf/v3:pkg", "//envoy/extensions/filters/http/dynamic_forward_proxy/v3:pkg", diff --git a/generated_api_shadow/envoy/config/filter/http/compressor/v2/BUILD b/generated_api_shadow/envoy/config/filter/http/compressor/v2/BUILD new file mode 100644 index 0000000000000..69168ad0cf246 --- /dev/null +++ b/generated_api_shadow/envoy/config/filter/http/compressor/v2/BUILD @@ -0,0 +1,12 @@ +# DO NOT EDIT. This file is generated by tools/proto_sync.py. 
+ +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = [ + "//envoy/api/v2/core:pkg", + "@com_github_cncf_udpa//udpa/annotations:pkg", + ], +) diff --git a/generated_api_shadow/envoy/config/filter/http/compressor/v2/compressor.proto b/generated_api_shadow/envoy/config/filter/http/compressor/v2/compressor.proto new file mode 100644 index 0000000000000..54814f9820737 --- /dev/null +++ b/generated_api_shadow/envoy/config/filter/http/compressor/v2/compressor.proto @@ -0,0 +1,46 @@ +syntax = "proto3"; + +package envoy.config.filter.http.compressor.v2; + +import "envoy/api/v2/core/base.proto"; + +import "google/protobuf/wrappers.proto"; + +import "udpa/annotations/migrate.proto"; + +option java_package = "io.envoyproxy.envoy.config.filter.http.compressor.v2"; +option java_outer_classname = "CompressorProto"; +option java_multiple_files = true; +option (udpa.annotations.file_migrate).move_to_package = + "envoy.extensions.filters.http.compressor.v3"; + +// [#protodoc-title: Compressor] + +// [#next-free-field: 6] +message Compressor { + // Minimum response length, in bytes, which will trigger compression. The default value is 30. + google.protobuf.UInt32Value content_length = 1; + + // Set of strings that allows specifying which mime-types yield compression; e.g., + // application/json, text/html, etc. When this field is not defined, compression will be applied + // to the following mime-types: "application/javascript", "application/json", + // "application/xhtml+xml", "image/svg+xml", "text/css", "text/html", "text/plain", "text/xml" + // and their synonyms. + repeated string content_type = 2; + + // If true, disables compression when the response contains an etag header. When it is false, the + // filter will preserve weak etags and remove the ones that require strong validation. 
+ bool disable_on_etag_header = 3; + + // If true, removes accept-encoding from the request headers before dispatching it to the upstream + // so that responses do not get compressed before reaching the filter. + // .. attention: + // + // To avoid interfering with other compression filters in the same chain use this option in + // the filter closest to the upstream. + bool remove_accept_encoding_header = 4; + + // Runtime flag that controls whether the filter is enabled or not. If set to false, the + // filter will operate as a pass-through filter. If not specified, defaults to enabled. + api.v2.core.RuntimeFeatureFlag runtime_enabled = 5; +} diff --git a/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/BUILD b/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/BUILD new file mode 100644 index 0000000000000..8dc07faa8f9f9 --- /dev/null +++ b/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/BUILD @@ -0,0 +1,13 @@ +# DO NOT EDIT. This file is generated by tools/proto_sync.py. 
+ +load("@envoy_api//bazel:api_build_system.bzl", "api_proto_package") + +licenses(["notice"]) # Apache 2 + +api_proto_package( + deps = [ + "//envoy/config/core/v3:pkg", + "//envoy/config/filter/http/compressor/v2:pkg", + "@com_github_cncf_udpa//udpa/annotations:pkg", + ], +) diff --git a/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/compressor.proto b/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/compressor.proto new file mode 100644 index 0000000000000..7d506a2152afa --- /dev/null +++ b/generated_api_shadow/envoy/extensions/filters/http/compressor/v3/compressor.proto @@ -0,0 +1,47 @@ +syntax = "proto3"; + +package envoy.extensions.filters.http.compressor.v3; + +import "envoy/config/core/v3/base.proto"; + +import "google/protobuf/wrappers.proto"; + +import "udpa/annotations/versioning.proto"; + +option java_package = "io.envoyproxy.envoy.extensions.filters.http.compressor.v3"; +option java_outer_classname = "CompressorProto"; +option java_multiple_files = true; + +// [#protodoc-title: Compressor] + +// [#next-free-field: 6] +message Compressor { + option (udpa.annotations.versioning).previous_message_type = + "envoy.config.filter.http.compressor.v2.Compressor"; + + // Minimum response length, in bytes, which will trigger compression. The default value is 30. + google.protobuf.UInt32Value content_length = 1; + + // Set of strings that allows specifying which mime-types yield compression; e.g., + // application/json, text/html, etc. When this field is not defined, compression will be applied + // to the following mime-types: "application/javascript", "application/json", + // "application/xhtml+xml", "image/svg+xml", "text/css", "text/html", "text/plain", "text/xml" + // and their synonyms. + repeated string content_type = 2; + + // If true, disables compression when the response contains an etag header. When it is false, the + // filter will preserve weak etags and remove the ones that require strong validation. 
+ bool disable_on_etag_header = 3; + + // If true, removes accept-encoding from the request headers before dispatching it to the upstream + // so that responses do not get compressed before reaching the filter. + // .. attention: + // + // To avoid interfering with other compression filters in the same chain use this option in + // the filter closest to the upstream. + bool remove_accept_encoding_header = 4; + + // Runtime flag that controls whether the filter is enabled or not. If set to false, the + // filter will operate as a pass-through filter. If not specified, defaults to enabled. + config.core.v3.RuntimeFeatureFlag runtime_enabled = 5; +} diff --git a/source/extensions/filters/http/common/compressor/BUILD b/source/extensions/filters/http/common/compressor/BUILD new file mode 100644 index 0000000000000..55e6a87aa2c72 --- /dev/null +++ b/source/extensions/filters/http/common/compressor/BUILD @@ -0,0 +1,26 @@ +licenses(["notice"]) # Apache 2 + +load( + "//bazel:envoy_build_system.bzl", + "envoy_cc_library", + "envoy_package", +) + +envoy_package() + +envoy_cc_library( + name = "compressor_lib", + srcs = ["compressor.cc"], + hdrs = ["compressor.h"], + deps = [ + "//include/envoy/compressor:compressor_interface", + "//include/envoy/stats:stats_macros", + "//include/envoy/stream_info:filter_state_interface", + "//source/common/buffer:buffer_lib", + "//source/common/http:header_map_lib", + "//source/common/protobuf", + "//source/common/runtime:runtime_lib", + "//source/extensions/filters/http/common:pass_through_filter_lib", + "@envoy_api//envoy/extensions/filters/http/compressor/v3:pkg_cc_proto", + ], +) diff --git a/source/extensions/filters/http/common/compressor/compressor.cc b/source/extensions/filters/http/common/compressor/compressor.cc new file mode 100644 index 0000000000000..4f3135e6798a7 --- /dev/null +++ b/source/extensions/filters/http/common/compressor/compressor.cc @@ -0,0 +1,436 @@ +#include "extensions/filters/http/common/compressor/compressor.h" 
+ +#include "common/buffer/buffer_impl.h" +#include "common/http/header_map_impl.h" + +namespace Envoy { +namespace Extensions { +namespace HttpFilters { +namespace Common { +namespace Compressors { + +namespace { + +// Default minimum length of an upstream response that allows compression. +const uint64_t DefaultMinimumContentLength = 30; + +// Default content types will be used if none are provided by the user. +const std::vector& defaultContentEncoding() { + CONSTRUCT_ON_FIRST_USE( + std::vector, + {"text/html", "text/plain", "text/css", "application/javascript", "application/x-javascript", + "text/javascript", "text/x-javascript", "text/ecmascript", "text/js", "text/jscript", + "text/x-js", "application/ecmascript", "application/x-json", "application/xml", + "application/json", "image/svg+xml", "text/xml", "application/xhtml+xml"}); +} + +// List of CompressorFilterConfig objects registered for a stream. +struct CompressorRegistry : public StreamInfo::FilterState::Object { + std::list filter_configs_; +}; + +// Key to per stream CompressorRegistry objects. 
+const std::string& compressorRegistryKey() { CONSTRUCT_ON_FIRST_USE(std::string, "compressors"); } + +} // namespace + +CompressorFilterConfig::CompressorFilterConfig( + const envoy::extensions::filters::http::compressor::v3::Compressor& compressor, + const std::string& stats_prefix, Stats::Scope& scope, Runtime::Loader& runtime, + const std::string& content_encoding) + : content_length_(contentLengthUint(compressor.content_length().value())), + content_type_values_(contentTypeSet(compressor.content_type())), + disable_on_etag_header_(compressor.disable_on_etag_header()), + remove_accept_encoding_header_(compressor.remove_accept_encoding_header()), + stats_(generateStats(stats_prefix, scope)), enabled_(compressor.runtime_enabled(), runtime), + content_encoding_(content_encoding) {} + +StringUtil::CaseUnorderedSet +CompressorFilterConfig::contentTypeSet(const Protobuf::RepeatedPtrField& types) { + const auto& default_content_encodings = defaultContentEncoding(); + return types.empty() ? StringUtil::CaseUnorderedSet(default_content_encodings.begin(), + default_content_encodings.end()) + : StringUtil::CaseUnorderedSet(types.cbegin(), types.cend()); +} + +uint32_t CompressorFilterConfig::contentLengthUint(Protobuf::uint32 length) { + return length > 0 ? length : DefaultMinimumContentLength; +} + +CompressorFilter::CompressorFilter(const CompressorFilterConfigSharedPtr config) + : skip_compression_{true}, config_(std::move(config)) {} + +Http::FilterHeadersStatus CompressorFilter::decodeHeaders(Http::RequestHeaderMap& headers, bool) { + const Http::HeaderEntry* accept_encoding = headers.AcceptEncoding(); + if (accept_encoding != nullptr) { + // Capture the value of the "Accept-Encoding" request header to use it later when making + // decision on compressing the corresponding HTTP response. 
+ accept_encoding_ = std::make_unique(accept_encoding->value().getStringView()); + } + + if (config_->enabled()) { + skip_compression_ = false; + if (config_->removeAcceptEncodingHeader()) { + headers.removeAcceptEncoding(); + } + } else { + config_->stats().not_compressed_.inc(); + } + + return Http::FilterHeadersStatus::Continue; +} + +void CompressorFilter::setDecoderFilterCallbacks(Http::StreamDecoderFilterCallbacks& callbacks) { + decoder_callbacks_ = &callbacks; + + absl::string_view key = compressorRegistryKey(); + // To properly handle the cases where the decision on instantiating a compressor depends on + // the presence of other compression filters in the chain the filters need to be aware of each + // other. This is achieved by exploiting per-request data objects StreamInfo::FilterState: upon + // setting up a CompressorFilter, the new instance registers itself in the filter state. Then in + // the method isAcceptEncodingAllowed() the first filter is making a decision which encoder needs + // to be used for a request, with e.g. "Accept-Encoding: deflate;q=0.75, gzip;q=0.5", and caches + // it in the state. All other compression filters in the sequence use the cached decision. 
+ const StreamInfo::FilterStateSharedPtr& filter_state = callbacks.streamInfo().filterState(); + if (filter_state->hasData(key)) { + CompressorRegistry& registry = filter_state->getDataMutable(key); + registry.filter_configs_.push_back(config_); + } else { + auto registry_ptr = std::make_unique(); + registry_ptr->filter_configs_.push_back(config_); + filter_state->setData(key, std::move(registry_ptr), + StreamInfo::FilterState::StateType::Mutable); + } +} + +Http::FilterHeadersStatus CompressorFilter::encodeHeaders(Http::ResponseHeaderMap& headers, + bool end_stream) { + if (!end_stream && !skip_compression_ && isMinimumContentLength(headers) && + isAcceptEncodingAllowed(headers) && isContentTypeAllowed(headers) && + !hasCacheControlNoTransform(headers) && isEtagAllowed(headers) && + isTransferEncodingAllowed(headers) && !headers.ContentEncoding()) { + sanitizeEtagHeader(headers); + insertVaryHeader(headers); + headers.removeContentLength(); + headers.setContentEncoding(config_->contentEncoding()); + config_->stats().compressed_.inc(); + // Finally instantiate the compressor. + compressor_ = config_->makeCompressor(); + } else if (!skip_compression_) { + skip_compression_ = true; + config_->stats().not_compressed_.inc(); + } + return Http::FilterHeadersStatus::Continue; +} + +Http::FilterDataStatus CompressorFilter::encodeData(Buffer::Instance& data, bool end_stream) { + if (!skip_compression_) { + config_->stats().total_uncompressed_bytes_.add(data.length()); + compressor_->compress(data, end_stream ? 
Compressor::State::Finish : Compressor::State::Flush); + config_->stats().total_compressed_bytes_.add(data.length()); + } + return Http::FilterDataStatus::Continue; +} + +Http::FilterTrailersStatus CompressorFilter::encodeTrailers(Http::ResponseTrailerMap&) { + if (!skip_compression_) { + Buffer::OwnedImpl empty_buffer; + compressor_->compress(empty_buffer, Compressor::State::Finish); + config_->stats().total_compressed_bytes_.add(empty_buffer.length()); + encoder_callbacks_->addEncodedData(empty_buffer, true); + } + return Http::FilterTrailersStatus::Continue; +} + +bool CompressorFilter::hasCacheControlNoTransform(Http::ResponseHeaderMap& headers) const { + const Http::HeaderEntry* cache_control = headers.CacheControl(); + if (cache_control) { + return StringUtil::caseFindToken(cache_control->value().getStringView(), ",", + Http::Headers::get().CacheControlValues.NoTransform); + } + + return false; +} + +// This function makes decision on which encoding to use for the response body and is +// supposed to be called only once per request even if there are multiple compressor +// filters in the chain. To make a decision the function needs to know what's the +// request's Accept-Encoding, the response's Content-Type and the list of compressor +// filters in the current chain. +// TODO(rojkov): add an explicit fuzzer for chooseEncoding(). +std::unique_ptr +CompressorFilter::chooseEncoding(const Http::ResponseHeaderMap& headers) const { + using EncPair = std::pair; // pair of {encoding, q_value} + std::vector pairs; + absl::string_view content_type_value; + + const Http::HeaderEntry* content_type = headers.ContentType(); + if (content_type != nullptr) { + content_type_value = + StringUtil::trim(StringUtil::cropRight(content_type->value().getStringView(), ";")); + } + + // Find all compressors enabled for the filter chain. 
+ std::map allowed_compressors; + uint32_t registration_count{0}; + for (const auto& filter_config : + decoder_callbacks_->streamInfo() + .filterState() + ->getDataReadOnly(compressorRegistryKey()) + .filter_configs_) { + // A compressor filter may be limited to compress certain Content-Types. If the response's + // content type doesn't match the list of content types this filter is enabled for then + // it must be excluded from the decision process. + // For example, there are two compressor filters in the chain e.g. "gzip" and "deflate". + // "gzip" is configured to compress only "text/html" and "deflate" is configured to compress + // only "application/javascript". Then comes a request with Accept-Encoding header + // "gzip;q=1,deflate;q=.5". The corresponding response content type is "application/javascript". + // If "gzip" is not excluded from the decision process then it will take precedence over + // "deflate" and the resulting response won't be compressed at all. + if (!content_type_value.empty() && !filter_config->contentTypeValues().empty()) { + auto iter = filter_config->contentTypeValues().find(content_type_value); + if (iter == filter_config->contentTypeValues().end()) { + // Skip adding this filter to the list of allowed compressors. + continue; + } + } + + // There could be many compressors registered for the same content encoding, e.g. consider a + // case when there are two gzip filters using different compression levels for different content + // sizes. In such case we ignore duplicates (or different filters for the same encoding) + // registered last. + auto enc = allowed_compressors.find(filter_config->contentEncoding()); + if (enc == allowed_compressors.end()) { + allowed_compressors.insert({filter_config->contentEncoding(), registration_count}); + ++registration_count; + } + } + + // Find all encodings accepted by the user agent and adjust the list of allowed compressors. 
+ for (const auto token : StringUtil::splitToken(*accept_encoding_, ",", false /* keep_empty */)) { + EncPair pair = std::make_pair(StringUtil::trim(StringUtil::cropRight(token, ";")), 1); + const auto params = StringUtil::cropLeft(token, ";"); + if (params != token) { + const auto q_value = StringUtil::cropLeft(params, "="); + if (q_value != params && + absl::EqualsIgnoreCase("q", StringUtil::trim(StringUtil::cropRight(params, "=")))) { + auto result = absl::SimpleAtof(StringUtil::trim(q_value), &pair.second); + if (!result) { + // Skip not parseable q-value. + continue; + } + } + } + + pairs.push_back(pair); + + if (!pair.second) { + // Disallow compressors with "q=0". + // The reason why we add encodings to "pairs" even with "q=0" is that "pairs" contains + // client's expectations and "allowed_compressors" is what Envoy can handle. Consider + // the cases of "Accept-Encoding: gzip;q=0, deflate, *" and "Accept-Encoding: deflate, *" + // whereas the proxy has only "gzip" configured. If we just exclude the encodings with "q=0" + // from "pairs" then upon noticing "*" we don't know if "gzip" is acceptable by the client. + allowed_compressors.erase(std::string(pair.first)); + } + } + + if (pairs.empty() || allowed_compressors.empty()) { + // If there's no intersection between accepted encodings and the ones provided by the allowed + // compressors, then only the "identity" encoding is acceptable. + return std::make_unique( + Http::Headers::get().AcceptEncodingValues.Identity, + CompressorFilter::EncodingDecision::HeaderStat::NotValid); + } + + // Find intersection of encodings accepted by the user agent and provided + // by the allowed compressors and choose the one with the highest q-value. 
+ EncPair choice{Http::Headers::get().AcceptEncodingValues.Identity, 0}; + for (const auto pair : pairs) { + if ((pair.second > choice.second) && + (allowed_compressors.count(std::string(pair.first)) || + pair.first == Http::Headers::get().AcceptEncodingValues.Identity || + pair.first == Http::Headers::get().AcceptEncodingValues.Wildcard)) { + choice = pair; + } + } + + if (!choice.second) { + // The value of "Accept-Encoding" must be invalid as we ended up with zero q-value. + return std::make_unique( + Http::Headers::get().AcceptEncodingValues.Identity, + CompressorFilter::EncodingDecision::HeaderStat::NotValid); + } + + // The "identity" encoding (no compression) is always available. + if (choice.first == Http::Headers::get().AcceptEncodingValues.Identity) { + return std::make_unique( + Http::Headers::get().AcceptEncodingValues.Identity, + CompressorFilter::EncodingDecision::HeaderStat::Identity); + } + + // If wildcard is given then use whichever compressor is registered first. + if (choice.first == Http::Headers::get().AcceptEncodingValues.Wildcard) { + auto first_registered = std::min_element( + allowed_compressors.begin(), allowed_compressors.end(), + [](const std::pair& a, + const std::pair& b) -> bool { return a.second < b.second; }); + return std::make_unique( + first_registered->first, CompressorFilter::EncodingDecision::HeaderStat::Wildcard); + } + + return std::make_unique( + std::string(choice.first), CompressorFilter::EncodingDecision::HeaderStat::ValidCompressor); +} + +// Check if this filter was chosen to compress. Also update the filter's stat counters related to +// the Accept-Encoding header. 
+bool CompressorFilter::shouldCompress(const CompressorFilter::EncodingDecision& decision) const { + const bool should_compress = + absl::EqualsIgnoreCase(config_->contentEncoding(), decision.encoding()); + + switch (decision.stat()) { + case CompressorFilter::EncodingDecision::HeaderStat::ValidCompressor: + if (should_compress) { + config_->stats().header_compressor_used_.inc(); + // TODO(rojkov): Remove this increment when the gzip-specific stat is gone. + if (absl::EqualsIgnoreCase("gzip", config_->contentEncoding())) { + config_->stats().header_gzip_.inc(); + } + } else { + // Some other compressor filter in the same chain compressed the response body, + // but not this filter. + config_->stats().header_compressor_overshadowed_.inc(); + } + break; + case CompressorFilter::EncodingDecision::HeaderStat::Identity: + config_->stats().header_identity_.inc(); + break; + case CompressorFilter::EncodingDecision::HeaderStat::Wildcard: + config_->stats().header_wildcard_.inc(); + break; + default: + config_->stats().header_not_valid_.inc(); + break; + } + + return should_compress; +} + +bool CompressorFilter::isAcceptEncodingAllowed(const Http::ResponseHeaderMap& headers) const { + if (accept_encoding_ == nullptr) { + config_->stats().no_accept_header_.inc(); + return false; + } + + const absl::string_view encoding_decision_key{"encoding_decision"}; + + // Check if we have already cached our decision on encoding. + const StreamInfo::FilterStateSharedPtr& filter_state = + decoder_callbacks_->streamInfo().filterState(); + if (filter_state->hasData(encoding_decision_key)) { + const CompressorFilter::EncodingDecision& decision = + filter_state->getDataReadOnly(encoding_decision_key); + return shouldCompress(decision); + } + + // No cached decision found, so decide now. 
+ std::unique_ptr decision = chooseEncoding(headers); + bool result = shouldCompress(*decision); + filter_state->setData(encoding_decision_key, std::move(decision), + StreamInfo::FilterState::StateType::ReadOnly); + return result; +} + +bool CompressorFilter::isContentTypeAllowed(Http::ResponseHeaderMap& headers) const { + const Http::HeaderEntry* content_type = headers.ContentType(); + if (content_type != nullptr && !config_->contentTypeValues().empty()) { + const absl::string_view value = + StringUtil::trim(StringUtil::cropRight(content_type->value().getStringView(), ";")); + return config_->contentTypeValues().find(value) != config_->contentTypeValues().end(); + } + + return true; +} + +bool CompressorFilter::isEtagAllowed(Http::ResponseHeaderMap& headers) const { + const bool is_etag_allowed = !(config_->disableOnEtagHeader() && headers.Etag()); + if (!is_etag_allowed) { + config_->stats().not_compressed_etag_.inc(); + } + return is_etag_allowed; +} + +bool CompressorFilter::isMinimumContentLength(Http::ResponseHeaderMap& headers) const { + const Http::HeaderEntry* content_length = headers.ContentLength(); + if (content_length != nullptr) { + uint64_t length; + const bool is_minimum_content_length = + absl::SimpleAtoi(content_length->value().getStringView(), &length) && + length >= config_->minimumLength(); + if (!is_minimum_content_length) { + config_->stats().content_length_too_small_.inc(); + } + return is_minimum_content_length; + } + + const Http::HeaderEntry* transfer_encoding = headers.TransferEncoding(); + return (transfer_encoding && + StringUtil::caseFindToken(transfer_encoding->value().getStringView(), ",", + Http::Headers::get().TransferEncodingValues.Chunked)); +} + +bool CompressorFilter::isTransferEncodingAllowed(Http::ResponseHeaderMap& headers) const { + const Http::HeaderEntry* transfer_encoding = headers.TransferEncoding(); + if (transfer_encoding != nullptr) { + for (absl::string_view header_value : + 
StringUtil::splitToken(transfer_encoding->value().getStringView(), ",", true)) { + const auto trimmed_value = StringUtil::trim(header_value); + if (absl::EqualsIgnoreCase(trimmed_value, config_->contentEncoding()) || + // or any other compression type known to Envoy + absl::EqualsIgnoreCase(trimmed_value, Http::Headers::get().TransferEncodingValues.Gzip) || + absl::EqualsIgnoreCase(trimmed_value, + Http::Headers::get().TransferEncodingValues.Deflate)) { + return false; + } + } + } + + return true; +} + +void CompressorFilter::insertVaryHeader(Http::ResponseHeaderMap& headers) { + const Http::HeaderEntry* vary = headers.Vary(); + if (vary != nullptr) { + if (!StringUtil::findToken(vary->value().getStringView(), ",", + Http::Headers::get().VaryValues.AcceptEncoding, true)) { + std::string new_header; + absl::StrAppend(&new_header, vary->value().getStringView(), ", ", + Http::Headers::get().VaryValues.AcceptEncoding); + headers.setVary(new_header); + } + } else { + headers.setReferenceVary(Http::Headers::get().VaryValues.AcceptEncoding); + } +} + +// TODO(gsagula): It seems that every proxy has a different opinion how to handle Etag. Some +// discussions around this topic have been going on for over a decade, e.g., +// https://bz.apache.org/bugzilla/show_bug.cgi?id=45023 +// This design attempts to stay more on the safe side by preserving weak etags and removing +// the strong ones when disable_on_etag_header is false. Envoy does NOT re-write entity tags. 
+void CompressorFilter::sanitizeEtagHeader(Http::ResponseHeaderMap& headers) { + const Http::HeaderEntry* etag = headers.Etag(); + if (etag != nullptr) { + absl::string_view value(etag->value().getStringView()); + if (value.length() > 2 && !((value[0] == 'w' || value[0] == 'W') && value[1] == '/')) { + headers.removeEtag(); + } + } +} + +} // namespace Compressors +} // namespace Common +} // namespace HttpFilters +} // namespace Extensions +} // namespace Envoy diff --git a/source/extensions/filters/http/common/compressor/compressor.h b/source/extensions/filters/http/common/compressor/compressor.h new file mode 100644 index 0000000000000..fc99ab517d0d8 --- /dev/null +++ b/source/extensions/filters/http/common/compressor/compressor.h @@ -0,0 +1,160 @@ +#pragma once + +#include "envoy/compressor/compressor.h" +#include "envoy/extensions/filters/http/compressor/v3/compressor.pb.h" +#include "envoy/stats/scope.h" +#include "envoy/stats/stats_macros.h" +#include "envoy/stream_info/filter_state.h" + +#include "common/protobuf/protobuf.h" +#include "common/runtime/runtime_protos.h" + +#include "extensions/filters/http/common/pass_through_filter.h" + +namespace Envoy { +namespace Extensions { +namespace HttpFilters { +namespace Common { +namespace Compressors { + +/** + * All compressor filter stats. @see stats_macros.h + * "total_uncompressed_bytes" only includes bytes from requests that were marked for compression. + * If the request was not marked for compression, the filter increments "not_compressed", but does + * not add to "total_uncompressed_bytes". This way, the user can measure the memory performance of + * the compression. + * + * "header_compressor_used" is a number of requests whose Accept-Encoding header explicitly stated + * that the response body should be compressed with the encoding provided by this filter instance. 
+ * + * "header_compressor_overshadowed" is a number of requests skipped by this filter instance because + * they were handled by another filter in the same filter chain. + * + * "header_gzip" is specific to the gzip filter and is deprecated since it duplicates + * "header_compressor_used". + */ +#define ALL_COMPRESSOR_STATS(COUNTER) \ + COUNTER(compressed) \ + COUNTER(not_compressed) \ + COUNTER(no_accept_header) \ + COUNTER(header_identity) \ + COUNTER(header_gzip) \ + COUNTER(header_compressor_used) \ + COUNTER(header_compressor_overshadowed) \ + COUNTER(header_wildcard) \ + COUNTER(header_not_valid) \ + COUNTER(total_uncompressed_bytes) \ + COUNTER(total_compressed_bytes) \ + COUNTER(content_length_too_small) \ + COUNTER(not_compressed_etag) + +/** + * Struct definition for compressor stats. @see stats_macros.h + */ +struct CompressorStats { + ALL_COMPRESSOR_STATS(GENERATE_COUNTER_STRUCT) +}; + +class CompressorFilterConfig { +public: + CompressorFilterConfig() = delete; + virtual ~CompressorFilterConfig() = default; + + virtual std::unique_ptr makeCompressor() PURE; + + bool enabled() const { return enabled_.enabled(); } + const CompressorStats& stats() { return stats_; } + const StringUtil::CaseUnorderedSet& contentTypeValues() const { return content_type_values_; } + bool disableOnEtagHeader() const { return disable_on_etag_header_; } + bool removeAcceptEncodingHeader() const { return remove_accept_encoding_header_; } + uint32_t minimumLength() const { return content_length_; } + const std::string contentEncoding() const { return content_encoding_; }; + const std::map registeredCompressors() const; + +protected: + CompressorFilterConfig( + const envoy::extensions::filters::http::compressor::v3::Compressor& compressor, + const std::string& stats_prefix, Stats::Scope& scope, Runtime::Loader& runtime, + const std::string& content_encoding); + +private: + static StringUtil::CaseUnorderedSet + contentTypeSet(const Protobuf::RepeatedPtrField& types); + + static 
uint32_t contentLengthUint(Protobuf::uint32 length); + + static CompressorStats generateStats(const std::string& prefix, Stats::Scope& scope) { + return CompressorStats{ALL_COMPRESSOR_STATS(POOL_COUNTER_PREFIX(scope, prefix))}; + } + + const uint32_t content_length_; + const StringUtil::CaseUnorderedSet content_type_values_; + const bool disable_on_etag_header_; + const bool remove_accept_encoding_header_; + + const CompressorStats stats_; + Runtime::FeatureFlag enabled_; + const std::string content_encoding_; +}; +using CompressorFilterConfigSharedPtr = std::shared_ptr; + +/** + * A filter that compresses data dispatched from the upstream upon client request. + */ +class CompressorFilter : public Http::PassThroughFilter { +public: + explicit CompressorFilter(const CompressorFilterConfigSharedPtr config); + + // Http::StreamDecoderFilter + Http::FilterHeadersStatus decodeHeaders(Http::RequestHeaderMap& headers, + bool end_stream) override; + void setDecoderFilterCallbacks(Http::StreamDecoderFilterCallbacks& callbacks) override; + + // Http::StreamEncoderFilter + Http::FilterHeadersStatus encodeHeaders(Http::ResponseHeaderMap& headers, + bool end_stream) override; + Http::FilterDataStatus encodeData(Buffer::Instance& buffer, bool end_stream) override; + Http::FilterTrailersStatus encodeTrailers(Http::ResponseTrailerMap&) override; + +private: + // TODO(gsagula): This is here temporarily and just to facilitate testing. Ideally all + // the logic in these private member functions would be available in another class. 
+ friend class CompressorFilterTest; + + bool hasCacheControlNoTransform(Http::ResponseHeaderMap& headers) const; + bool isAcceptEncodingAllowed(const Http::ResponseHeaderMap& headers) const; + bool isContentTypeAllowed(Http::ResponseHeaderMap& headers) const; + bool isEtagAllowed(Http::ResponseHeaderMap& headers) const; + bool isMinimumContentLength(Http::ResponseHeaderMap& headers) const; + bool isTransferEncodingAllowed(Http::ResponseHeaderMap& headers) const; + + void sanitizeEtagHeader(Http::ResponseHeaderMap& headers); + void insertVaryHeader(Http::ResponseHeaderMap& headers); + + class EncodingDecision : public StreamInfo::FilterState::Object { + public: + enum class HeaderStat { NotValid, Identity, Wildcard, ValidCompressor }; + EncodingDecision(const std::string& encoding, const HeaderStat stat) + : encoding_(encoding), stat_(stat) {} + const std::string& encoding() const { return encoding_; } + HeaderStat stat() const { return stat_; } + + private: + const std::string encoding_; + const HeaderStat stat_; + }; + + std::unique_ptr chooseEncoding(const Http::ResponseHeaderMap& headers) const; + bool shouldCompress(const EncodingDecision& decision) const; + + bool skip_compression_; + std::unique_ptr compressor_; + const CompressorFilterConfigSharedPtr config_; + std::unique_ptr accept_encoding_; +}; + +} // namespace Compressors +} // namespace Common +} // namespace HttpFilters +} // namespace Extensions +} // namespace Envoy diff --git a/test/extensions/filters/http/common/compressor/BUILD b/test/extensions/filters/http/common/compressor/BUILD new file mode 100644 index 0000000000000..b03a3cf39122b --- /dev/null +++ b/test/extensions/filters/http/common/compressor/BUILD @@ -0,0 +1,23 @@ +licenses(["notice"]) # Apache 2 + +load( + "//bazel:envoy_build_system.bzl", + "envoy_cc_test", + "envoy_package", +) + +envoy_package() + +envoy_cc_test( + name = "compressor_filter_test", + srcs = ["compressor_filter_test.cc"], + deps = [ + 
"//source/common/protobuf:utility_lib", + "//source/extensions/filters/http/common/compressor:compressor_lib", + "//test/mocks/http:http_mocks", + "//test/mocks/protobuf:protobuf_mocks", + "//test/mocks/runtime:runtime_mocks", + "//test/test_common:utility_lib", + "@envoy_api//envoy/extensions/filters/http/compressor/v3:pkg_cc_proto", + ], +) diff --git a/test/extensions/filters/http/common/compressor/compressor_filter_test.cc b/test/extensions/filters/http/common/compressor/compressor_filter_test.cc new file mode 100644 index 0000000000000..6f4379299330a --- /dev/null +++ b/test/extensions/filters/http/common/compressor/compressor_filter_test.cc @@ -0,0 +1,888 @@ +#include "envoy/extensions/filters/http/compressor/v3/compressor.pb.h" + +#include "common/protobuf/utility.h" + +#include "extensions/filters/http/common/compressor/compressor.h" + +#include "test/mocks/http/mocks.h" +#include "test/mocks/protobuf/mocks.h" +#include "test/mocks/runtime/mocks.h" +#include "test/mocks/stats/mocks.h" +#include "test/test_common/utility.h" + +#include "gtest/gtest.h" + +using testing::Return; + +namespace Envoy { +namespace Extensions { +namespace HttpFilters { +namespace Common { +namespace Compressors { + +class MockCompressor : public Compressor::Compressor { + void compress(Buffer::Instance&, ::Envoy::Compressor::State) override {} +}; + +class MockCompressorFilterConfig : public CompressorFilterConfig { +public: + MockCompressorFilterConfig( + const envoy::extensions::filters::http::compressor::v3::Compressor& compressor, + const std::string& stats_prefix, Stats::Scope& scope, Runtime::Loader& runtime, + const std::string& compressor_name) + : CompressorFilterConfig(compressor, stats_prefix + compressor_name + ".", scope, runtime, + compressor_name) {} + + std::unique_ptr makeCompressor() override { + return std::make_unique(); + } +}; + +class CompressorFilterTest : public testing::Test { +protected: + CompressorFilterTest() { + ON_CALL(runtime_.snapshot_, 
featureEnabled("test.filter_enabled", 100)) + .WillByDefault(Return(true)); + } + + void SetUp() override { setUpFilter("{}"); } + + // CompressorFilter private member functions + void sanitizeEtagHeader(Http::ResponseHeaderMap& headers) { + filter_->sanitizeEtagHeader(headers); + } + + void insertVaryHeader(Http::ResponseHeaderMap& headers) { filter_->insertVaryHeader(headers); } + + bool isContentTypeAllowed(Http::ResponseHeaderMap& headers) { + return filter_->isContentTypeAllowed(headers); + } + + bool isEtagAllowed(Http::ResponseHeaderMap& headers) { return filter_->isEtagAllowed(headers); } + + bool hasCacheControlNoTransform(Http::ResponseHeaderMap& headers) { + return filter_->hasCacheControlNoTransform(headers); + } + + bool isAcceptEncodingAllowed(const std::string accept_encoding, + const std::unique_ptr& filter = nullptr) { + Http::TestResponseHeaderMapImpl headers; + if (filter) { + filter->accept_encoding_ = std::make_unique(accept_encoding); + return filter->isAcceptEncodingAllowed(headers); + } else { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + filter_->accept_encoding_ = std::make_unique(accept_encoding); + return filter_->isAcceptEncodingAllowed(headers); + } + } + + bool isMinimumContentLength(Http::ResponseHeaderMap& headers) { + return filter_->isMinimumContentLength(headers); + } + + bool isTransferEncodingAllowed(Http::ResponseHeaderMap& headers) { + return filter_->isTransferEncodingAllowed(headers); + } + + // CompressorFilterTest Helpers + void setUpFilter(std::string&& json) { + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson(json, compressor); + config_.reset(new MockCompressorFilterConfig(compressor, "test.", stats_, runtime_, "test")); + filter_ = std::make_unique(config_); + filter_->setEncoderFilterCallbacks(encoder_callbacks_); + } + + void verifyCompressedData() { + EXPECT_EQ(expected_str_.length(), 
stats_.counter("test.test.total_uncompressed_bytes").value()); + EXPECT_EQ(data_.length(), stats_.counter("test.test.total_compressed_bytes").value()); + } + + void feedBuffer(uint64_t size) { + TestUtility::feedBufferWithRandomCharacters(data_, size); + expected_str_ += data_.toString(); + } + + void drainBuffer() { + const uint64_t data_len = data_.length(); + data_.drain(data_len); + } + + void doRequest(Http::TestRequestHeaderMapImpl&& headers, bool end_stream) { + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->decodeHeaders(headers, end_stream)); + } + + void doResponseCompression(Http::TestResponseHeaderMapImpl&& headers, bool with_trailers) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + uint64_t content_length; + ASSERT_TRUE(absl::SimpleAtoi(headers.get_("content-length"), &content_length)); + feedBuffer(content_length); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_EQ("", headers.get_("content-length")); + EXPECT_EQ("test", headers.get_("content-encoding")); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->encodeData(data_, !with_trailers)); + if (with_trailers) { + Buffer::OwnedImpl trailers_buffer; + EXPECT_CALL(encoder_callbacks_, addEncodedData(_, true)) + .WillOnce(Invoke([&](Buffer::Instance& data, bool) { data_.move(data); })); + Http::TestResponseTrailerMapImpl trailers; + EXPECT_EQ(Http::FilterTrailersStatus::Continue, filter_->encodeTrailers(trailers)); + } + verifyCompressedData(); + drainBuffer(); + EXPECT_EQ(1U, stats_.counter("test.test.compressed").value()); + } + + void doResponseNoCompression(Http::TestResponseHeaderMapImpl&& headers) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + uint64_t content_length; + ASSERT_TRUE(absl::SimpleAtoi(headers.get_("content-length"), &content_length)); + feedBuffer(content_length); + Http::TestResponseHeaderMapImpl continue_headers; + 
EXPECT_EQ(Http::FilterHeadersStatus::Continue, + filter_->encode100ContinueHeaders(continue_headers)); + Http::MetadataMap metadata_map{{"metadata", "metadata"}}; + EXPECT_EQ(Http::FilterMetadataStatus::Continue, filter_->encodeMetadata(metadata_map)); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_EQ("", headers.get_("content-encoding")); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->encodeData(data_, false)); + Http::TestResponseTrailerMapImpl trailers; + EXPECT_EQ(Http::FilterTrailersStatus::Continue, filter_->encodeTrailers(trailers)); + EXPECT_EQ(1, stats_.counter("test.test.not_compressed").value()); + } + + CompressorFilterConfigSharedPtr config_; + std::unique_ptr filter_; + Buffer::OwnedImpl data_; + std::string expected_str_; + Stats::IsolatedStoreImpl stats_; + NiceMock runtime_; + NiceMock encoder_callbacks_; +}; + +// Test if Runtime Feature is Disabled +TEST_F(CompressorFilterTest, DecodeHeadersWithRuntimeDisabled) { + setUpFilter(R"EOF( +{ + "runtime_enabled": { + "default_value": true, + "runtime_key": "foo_key" + } +} +)EOF"); + EXPECT_CALL(runtime_.snapshot_, getBoolean("foo_key", true)).WillOnce(Return(false)); + doRequest({{":method", "get"}, {"accept-encoding", "deflate, test"}}, false); + doResponseNoCompression({{":method", "get"}, {"content-length", "256"}}); +} + +// Default config values. +TEST_F(CompressorFilterTest, DefaultConfigValues) { + EXPECT_EQ(30, config_->minimumLength()); + EXPECT_EQ(false, config_->disableOnEtagHeader()); + EXPECT_EQ(false, config_->removeAcceptEncodingHeader()); + EXPECT_EQ(18, config_->contentTypeValues().size()); +} + +// Acceptance Testing with default configuration. 
+TEST_F(CompressorFilterTest, AcceptanceTestEncoding) { + doRequest({{":method", "get"}, {"accept-encoding", "deflate, test"}}, false); + Buffer::OwnedImpl data("hello"); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->decodeData(data, false)); + Http::TestRequestTrailerMapImpl trailers; + EXPECT_EQ(Http::FilterTrailersStatus::Continue, filter_->decodeTrailers(trailers)); + doResponseCompression({{":method", "get"}, {"content-length", "256"}}, false); +} + +TEST_F(CompressorFilterTest, AcceptanceTestEncodingWithTrailers) { + doRequest({{":method", "get"}, {"accept-encoding", "deflate, test"}}, false); + Buffer::OwnedImpl data("hello"); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->decodeData(data, false)); + Http::TestRequestTrailerMapImpl trailers; + EXPECT_EQ(Http::FilterTrailersStatus::Continue, filter_->decodeTrailers(trailers)); + doResponseCompression({{":method", "get"}, {"content-length", "256"}}, true); +} + +// Verifies hasCacheControlNoTransform function. +TEST_F(CompressorFilterTest, HasCacheControlNoTransform) { + { + Http::TestResponseHeaderMapImpl headers = {{"cache-control", "no-cache"}}; + EXPECT_FALSE(hasCacheControlNoTransform(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"cache-control", "no-transform"}}; + EXPECT_TRUE(hasCacheControlNoTransform(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"cache-control", "No-Transform"}}; + EXPECT_TRUE(hasCacheControlNoTransform(headers)); + } +} + +// Verifies that compression is skipped when cache-control header has no-transform value. +TEST_F(CompressorFilterTest, HasCacheControlNoTransformNoCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test;q=1, deflate"}}, true); + doResponseNoCompression( + {{":method", "get"}, {"content-length", "256"}, {"cache-control", "no-transform"}}); +} + +// Verifies that compression is NOT skipped when cache-control header does NOT have no-transform +// value. 
+TEST_F(CompressorFilterTest, HasCacheControlNoTransformCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test, deflate"}}, true); + doResponseCompression( + {{":method", "get"}, {"content-length", "256"}, {"cache-control", "no-cache"}}, false); +} + +TEST_F(CompressorFilterTest, NoAcceptEncodingHeader) { + doRequest({{":method", "get"}, {}}, true); + doResponseNoCompression({{":method", "get"}, {"content-length", "256"}}); + EXPECT_EQ(1, stats_.counter("test.test.no_accept_header").value()); +} + +// Verifies isAcceptEncodingAllowed function. +TEST_F(CompressorFilterTest, IsAcceptEncodingAllowed) { + { + EXPECT_TRUE(isAcceptEncodingAllowed("deflate, test, br")); + EXPECT_EQ(1, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("deflate, test;q=1.0, *;q=0.5")); + EXPECT_EQ(2, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("\tdeflate\t, test\t ; q\t =\t 1.0,\t * ;q=0.5")); + EXPECT_EQ(3, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("deflate,test;q=1.0,*;q=0")); + EXPECT_EQ(4, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("deflate, test;q=0.2, br;q=1")); + EXPECT_EQ(5, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("*")); + EXPECT_EQ(1, stats_.counter("test.test.header_wildcard").value()); + EXPECT_EQ(5, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("*;q=1")); + EXPECT_EQ(2, stats_.counter("test.test.header_wildcard").value()); + EXPECT_EQ(5, stats_.counter("test.test.header_compressor_used").value()); + } + { + // test header is not valid due to q=0. 
+ EXPECT_FALSE(isAcceptEncodingAllowed("test;q=0,*;q=1")); + EXPECT_EQ(5, stats_.counter("test.test.header_compressor_used").value()); + EXPECT_EQ(1, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity, *;q=0")); + EXPECT_EQ(1, stats_.counter("test.test.header_identity").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity;q=0.5, *;q=0")); + EXPECT_EQ(2, stats_.counter("test.test.header_identity").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity;q=0, *;q=0")); + EXPECT_EQ(2, stats_.counter("test.test.header_identity").value()); + EXPECT_EQ(2, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("xyz;q=1, br;q=0.2, *")); + EXPECT_EQ(3, stats_.counter("test.test.header_wildcard").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("xyz;q=1, br;q=0.2, *;q=0")); + EXPECT_EQ(3, stats_.counter("test.test.header_wildcard").value()); + EXPECT_EQ(3, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("xyz;q=1, br;q=0.2")); + EXPECT_EQ(4, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity")); + EXPECT_EQ(3, stats_.counter("test.test.header_identity").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity;q=1")); + EXPECT_EQ(4, stats_.counter("test.test.header_identity").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity;q=0")); + EXPECT_EQ(4, stats_.counter("test.test.header_identity").value()); + EXPECT_EQ(5, stats_.counter("test.test.header_not_valid").value()); + } + { + // Test that we return identity and ignore the invalid wildcard. 
+ EXPECT_FALSE(isAcceptEncodingAllowed("identity, *;q=0")); + EXPECT_EQ(5, stats_.counter("test.test.header_identity").value()); + EXPECT_EQ(5, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_TRUE(isAcceptEncodingAllowed("deflate, test;Q=.5, br")); + EXPECT_EQ(6, stats_.counter("test.test.header_compressor_used").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("identity;Q=0")); + EXPECT_EQ(5, stats_.counter("test.test.header_identity").value()); + EXPECT_EQ(6, stats_.counter("test.test.header_not_valid").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("")); + EXPECT_EQ(5, stats_.counter("test.test.header_identity").value()); + EXPECT_EQ(7, stats_.counter("test.test.header_not_valid").value()); + } + { + // Compressor "test2" from an independent filter chain should not overshadow "test". + // The independence is simulated with a new instance DecoderFilterCallbacks set for "test2". + Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "test2")); + std::unique_ptr filter2 = std::make_unique(config2); + NiceMock decoder_callbacks; + filter2->setDecoderFilterCallbacks(decoder_callbacks); + + EXPECT_TRUE(isAcceptEncodingAllowed("test;Q=.5,test2;q=0.75")); + EXPECT_TRUE(isAcceptEncodingAllowed("test;Q=.5,test2;q=0.75", filter2)); + EXPECT_EQ(0, stats_.counter("test.test.header_compressor_overshadowed").value()); + EXPECT_EQ(7, stats_.counter("test.test.header_compressor_used").value()); + EXPECT_EQ(1, stats.counter("test2.test2.header_compressor_used").value()); + } + { + EXPECT_FALSE(isAcceptEncodingAllowed("test;q=invalid")); + EXPECT_EQ(8, stats_.counter("test.test.header_not_valid").value()); + } + { + // check if the legacy "header_gzip" counter is incremented for gzip compression 
filter + Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "gzip")); + std::unique_ptr gzip_filter = std::make_unique(config2); + NiceMock decoder_callbacks; + gzip_filter->setDecoderFilterCallbacks(decoder_callbacks); + + EXPECT_TRUE(isAcceptEncodingAllowed("gzip;q=0.75", gzip_filter)); + EXPECT_EQ(1, stats.counter("test2.gzip.header_gzip").value()); + // This fake Accept-Encoding is ignored as a cached decision is used. + EXPECT_TRUE(isAcceptEncodingAllowed("fake", gzip_filter)); + EXPECT_EQ(2, stats.counter("test2.gzip.header_gzip").value()); + } + { + // check if identity stat is increased twice (the second time via the cached path). + Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "test")); + std::unique_ptr filter2 = std::make_unique(config2); + NiceMock decoder_callbacks; + filter2->setDecoderFilterCallbacks(decoder_callbacks); + + EXPECT_FALSE(isAcceptEncodingAllowed("identity", filter2)); + EXPECT_EQ(1, stats.counter("test2.test.header_identity").value()); + // This fake Accept-Encoding is ignored as a cached decision is used. + EXPECT_FALSE(isAcceptEncodingAllowed("fake", filter2)); + EXPECT_EQ(2, stats.counter("test2.test.header_identity").value()); + } + { + // check if not_valid stat is increased twice (the second time via the cached path). 
+ Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "test")); + std::unique_ptr filter2 = std::make_unique(config2); + NiceMock decoder_callbacks; + filter2->setDecoderFilterCallbacks(decoder_callbacks); + + EXPECT_FALSE(isAcceptEncodingAllowed("test;q=invalid", filter2)); + EXPECT_EQ(1, stats.counter("test2.test.header_not_valid").value()); + // This fake Accept-Encoding is ignored as a cached decision is used. + EXPECT_FALSE(isAcceptEncodingAllowed("fake", filter2)); + EXPECT_EQ(2, stats.counter("test2.test.header_not_valid").value()); + } + { + // Test that encoding decision is cached when used by multiple filters. + Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config1; + config1.reset(new MockCompressorFilterConfig(compressor, "test1.", stats, runtime, "test1")); + std::unique_ptr filter1 = std::make_unique(config1); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "test2")); + std::unique_ptr filter2 = std::make_unique(config2); + NiceMock decoder_callbacks; + filter1->setDecoderFilterCallbacks(decoder_callbacks); + filter2->setDecoderFilterCallbacks(decoder_callbacks); + + std::string accept_encoding = "test1;Q=.5,test2;q=0.75"; + EXPECT_FALSE(isAcceptEncodingAllowed(accept_encoding, filter1)); + EXPECT_TRUE(isAcceptEncodingAllowed(accept_encoding, filter2)); + EXPECT_EQ(1, stats.counter("test1.test1.header_compressor_overshadowed").value()); + EXPECT_EQ(1, stats.counter("test2.test2.header_compressor_used").value()); + EXPECT_FALSE(isAcceptEncodingAllowed(accept_encoding, 
 filter1)); + EXPECT_EQ(2, stats.counter("test1.test1.header_compressor_overshadowed").value()); + // This fake Accept-Encoding header is ignored. Instead the cached decision is used. + EXPECT_TRUE(isAcceptEncodingAllowed("fake", filter2)); + EXPECT_EQ(2, stats.counter("test2.test2.header_compressor_used").value()); + } + { + // Test that first registered filter is used when handling wildcard. + Stats::IsolatedStoreImpl stats; + NiceMock runtime; + envoy::extensions::filters::http::compressor::v3::Compressor compressor; + TestUtility::loadFromJson("{}", compressor); + CompressorFilterConfigSharedPtr config1; + config1.reset(new MockCompressorFilterConfig(compressor, "test1.", stats, runtime, "test1")); + std::unique_ptr filter1 = std::make_unique(config1); + CompressorFilterConfigSharedPtr config2; + config2.reset(new MockCompressorFilterConfig(compressor, "test2.", stats, runtime, "test2")); + std::unique_ptr filter2 = std::make_unique(config2); + NiceMock decoder_callbacks; + filter1->setDecoderFilterCallbacks(decoder_callbacks); + filter2->setDecoderFilterCallbacks(decoder_callbacks); + + std::string accept_encoding = "*"; + EXPECT_TRUE(isAcceptEncodingAllowed(accept_encoding, filter1)); + EXPECT_FALSE(isAcceptEncodingAllowed(accept_encoding, filter2)); + EXPECT_EQ(1, stats.counter("test1.test1.header_wildcard").value()); + EXPECT_EQ(1, stats.counter("test2.test2.header_wildcard").value()); + } +} + +// Verifies that compression is skipped when accept-encoding header is not allowed. +TEST_F(CompressorFilterTest, AcceptEncodingNoCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test;q=0, deflate"}}, true); + doResponseNoCompression({{":method", "get"}, {"content-length", "256"}}); +} + +// Verifies that compression is NOT skipped when accept-encoding header is allowed. 
+TEST_F(CompressorFilterTest, AcceptEncodingCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test, deflate"}}, true); + doResponseCompression({{":method", "get"}, {"content-length", "256"}}, false); +} + +// Verifies isMinimumContentLength function. +TEST_F(CompressorFilterTest, IsMinimumContentLength) { + { + Http::TestResponseHeaderMapImpl headers = {{"content-length", "31"}}; + EXPECT_TRUE(isMinimumContentLength(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-length", "29"}}; + EXPECT_FALSE(isMinimumContentLength(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "chunked"}}; + EXPECT_TRUE(isMinimumContentLength(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "Chunked"}}; + EXPECT_TRUE(isMinimumContentLength(headers)); + } + + setUpFilter(R"EOF({"content_length": 500})EOF"); + { + Http::TestResponseHeaderMapImpl headers = {{"content-length", "501"}}; + EXPECT_TRUE(isMinimumContentLength(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "chunked"}}; + EXPECT_TRUE(isMinimumContentLength(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-length", "499"}}; + EXPECT_FALSE(isMinimumContentLength(headers)); + } +} + +// Verifies that compression is skipped when content-length header is NOT allowed. +TEST_F(CompressorFilterTest, ContentLengthNoCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + doResponseNoCompression({{":method", "get"}, {"content-length", "10"}}); +} + +// Verifies that compression is NOT skipped when content-length header is allowed. +TEST_F(CompressorFilterTest, ContentLengthCompression) { + setUpFilter(R"EOF({"content_length": 500})EOF"); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + doResponseCompression({{":method", "get"}, {"content-length", "1000"}}, false); +} + +// Verifies isContentTypeAllowed function. 
+TEST_F(CompressorFilterTest, IsContentTypeAllowed) { + + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "text/html"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "text/xml"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "text/plain"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "application/javascript"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "image/svg+xml"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "application/json;charset=utf-8"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "application/json"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "application/xhtml+xml"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "Application/XHTML+XML"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "image/jpeg"}}; + EXPECT_FALSE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "\ttext/html\t"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + + setUpFilter(R"EOF( + { + "content_type": [ + "text/html", + "xyz/svg+xml", + "Test/INSENSITIVE" + ] + } + )EOF"); + + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "xyz/svg+xml"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {}; + 
EXPECT_TRUE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "xyz/false"}}; + EXPECT_FALSE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "image/jpeg"}}; + EXPECT_FALSE(isContentTypeAllowed(headers)); + } + { + Http::TestResponseHeaderMapImpl headers = {{"content-type", "test/insensitive"}}; + EXPECT_TRUE(isContentTypeAllowed(headers)); + } +} + +// Verifies that compression is skipped when content-type header is NOT allowed. +TEST_F(CompressorFilterTest, ContentTypeNoCompression) { + setUpFilter(R"EOF( + { + "content_type": [ + "text/html", + "text/css", + "text/plain", + "application/javascript", + "application/json", + "font/eot", + "image/svg+xml" + ] + } + )EOF"); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + doResponseNoCompression( + {{":method", "get"}, {"content-length", "256"}, {"content-type", "image/jpeg"}}); + EXPECT_EQ(1, stats_.counter("test.test.header_not_valid").value()); +} + +// Verifies that compression is NOT skipped when content-encoding header is allowed. +TEST_F(CompressorFilterTest, ContentTypeCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + doResponseCompression({{":method", "get"}, + {"content-length", "256"}, + {"content-type", "application/json;charset=utf-8"}}, + false); +} + +// Verifies sanitizeEtagHeader function. 
+TEST_F(CompressorFilterTest, SanitizeEtagHeader) { + { + std::string etag_header{R"EOF(W/"686897696a7c876b7e")EOF"}; + Http::TestResponseHeaderMapImpl headers = {{"etag", etag_header}}; + sanitizeEtagHeader(headers); + EXPECT_EQ(etag_header, headers.get_("etag")); + } + { + std::string etag_header{R"EOF(w/"686897696a7c876b7e")EOF"}; + Http::TestResponseHeaderMapImpl headers = {{"etag", etag_header}}; + sanitizeEtagHeader(headers); + EXPECT_EQ(etag_header, headers.get_("etag")); + } + { + Http::TestResponseHeaderMapImpl headers = {{"etag", "686897696a7c876b7e"}}; + sanitizeEtagHeader(headers); + EXPECT_FALSE(headers.has("etag")); + } +} + +// Verifies isEtagAllowed function. +TEST_F(CompressorFilterTest, IsEtagAllowed) { + { + Http::TestResponseHeaderMapImpl headers = {{"etag", R"EOF(W/"686897696a7c876b7e")EOF"}}; + EXPECT_TRUE(isEtagAllowed(headers)); + EXPECT_EQ(0, stats_.counter("test.test.not_compressed_etag").value()); + } + { + Http::TestResponseHeaderMapImpl headers = {{"etag", "686897696a7c876b7e"}}; + EXPECT_TRUE(isEtagAllowed(headers)); + EXPECT_EQ(0, stats_.counter("test.test.not_compressed_etag").value()); + } + { + Http::TestResponseHeaderMapImpl headers = {}; + EXPECT_TRUE(isEtagAllowed(headers)); + EXPECT_EQ(0, stats_.counter("test.test.not_compressed_etag").value()); + } + + setUpFilter(R"EOF({ "disable_on_etag_header": true })EOF"); + { + Http::TestResponseHeaderMapImpl headers = {{"etag", R"EOF(W/"686897696a7c876b7e")EOF"}}; + EXPECT_FALSE(isEtagAllowed(headers)); + EXPECT_EQ(1, stats_.counter("test.test.not_compressed_etag").value()); + } + { + Http::TestResponseHeaderMapImpl headers = {{"etag", "686897696a7c876b7e"}}; + EXPECT_FALSE(isEtagAllowed(headers)); + EXPECT_EQ(2, stats_.counter("test.test.not_compressed_etag").value()); + } + { + Http::TestResponseHeaderMapImpl headers = {}; + EXPECT_TRUE(isEtagAllowed(headers)); + EXPECT_EQ(2, stats_.counter("test.test.not_compressed_etag").value()); + } +} + +// Verifies that compression is skipped 
when etag header is NOT allowed. +TEST_F(CompressorFilterTest, EtagNoCompression) { + setUpFilter(R"EOF({ "disable_on_etag_header": true })EOF"); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + doResponseNoCompression( + {{":method", "get"}, {"content-length", "256"}, {"etag", R"EOF(W/"686897696a7c876b7e")EOF"}}); + EXPECT_EQ(1, stats_.counter("test.test.not_compressed_etag").value()); +} + +// Verifies that compression is skipped when etag header is NOT allowed. +TEST_F(CompressorFilterTest, EtagCompression) { + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + Http::TestResponseHeaderMapImpl headers{ + {":method", "get"}, {"content-length", "256"}, {"etag", "686897696a7c876b7e"}}; + feedBuffer(256); + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_FALSE(headers.has("etag")); + EXPECT_EQ("test", headers.get_("content-encoding")); +} + +// Verifies isTransferEncodingAllowed function. 
// Verifies isTransferEncodingAllowed(): only an absent header or a lone
// "chunked" value (any case, surrounding whitespace tolerated) permits
// compression; any other encoding in the list blocks it.
TEST_F(CompressorFilterTest, IsTransferEncodingAllowed) {
  {
    Http::TestResponseHeaderMapImpl headers = {};
    EXPECT_TRUE(isTransferEncodingAllowed(headers));
  }
  {
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "chunked"}};
    EXPECT_TRUE(isTransferEncodingAllowed(headers));
  }
  {
    // Case-insensitive match.
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "Chunked"}};
    EXPECT_TRUE(isTransferEncodingAllowed(headers));
  }
  {
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "deflate"}};
    EXPECT_FALSE(isTransferEncodingAllowed(headers));
  }
  {
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "Deflate"}};
    EXPECT_FALSE(isTransferEncodingAllowed(headers));
  }
  {
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "test"}};
    EXPECT_FALSE(isTransferEncodingAllowed(headers));
  }
  {
    // A disallowed encoding anywhere in the list blocks compression even
    // when "chunked" is also present.
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", "test, chunked"}};
    EXPECT_FALSE(isTransferEncodingAllowed(headers));
  }
  {
    // Whitespace around list items does not change the outcome.
    Http::TestResponseHeaderMapImpl headers = {{"transfer-encoding", " test\t, chunked\t"}};
    EXPECT_FALSE(isTransferEncodingAllowed(headers));
  }
}

// Compression is applied when Transfer-Encoding is exactly "chunked".
TEST_F(CompressorFilterTest, TransferEncodingChunked) {
  doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true);
  doResponseCompression(
      {{":method", "get"}, {"content-length", "256"}, {"transfer-encoding", "chunked"}}, false);
}

// Compression is skipped when Transfer-Encoding lists an encoding other than
// "chunked" (the previous comment incorrectly said compression happens here).
TEST_F(CompressorFilterTest, AcceptanceTransferEncoding) {

  doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true);
  doResponseNoCompression(
      {{":method", "get"}, {"content-length", "256"}, {"transfer-encoding", "chunked, deflate"}});
}

// Content-Encoding: upstream response is already encoded.
+TEST_F(CompressorFilterTest, ContentEncodingAlreadyEncoded) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + Http::TestResponseHeaderMapImpl response_headers{ + {":method", "get"}, {"content-length", "256"}, {"content-encoding", "deflate, gzip"}}; + feedBuffer(256); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(response_headers, false)); + EXPECT_TRUE(response_headers.has("content-length")); + EXPECT_FALSE(response_headers.has("transfer-encoding")); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->encodeData(data_, false)); +} + +// No compression when upstream response is empty. +TEST_F(CompressorFilterTest, EmptyResponse) { + + Http::TestResponseHeaderMapImpl headers{{":method", "get"}, {":status", "204"}}; + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, true)); + EXPECT_EQ("", headers.get_("content-length")); + EXPECT_EQ("", headers.get_("content-encoding")); + EXPECT_EQ(Http::FilterDataStatus::Continue, filter_->encodeData(data_, true)); +} + +// Verifies insertVaryHeader function. 
+TEST_F(CompressorFilterTest, InsertVaryHeader) { + { + Http::TestResponseHeaderMapImpl headers = {}; + insertVaryHeader(headers); + EXPECT_EQ("Accept-Encoding", headers.get_("vary")); + } + { + Http::TestResponseHeaderMapImpl headers = {{"vary", "Cookie"}}; + insertVaryHeader(headers); + EXPECT_EQ("Cookie, Accept-Encoding", headers.get_("vary")); + } + { + Http::TestResponseHeaderMapImpl headers = {{"vary", "accept-encoding"}}; + insertVaryHeader(headers); + EXPECT_EQ("accept-encoding, Accept-Encoding", headers.get_("vary")); + } + { + Http::TestResponseHeaderMapImpl headers = {{"vary", "Accept-Encoding, Cookie"}}; + insertVaryHeader(headers); + EXPECT_EQ("Accept-Encoding, Cookie", headers.get_("vary")); + } + { + Http::TestResponseHeaderMapImpl headers = {{"vary", "Accept-Encoding"}}; + insertVaryHeader(headers); + EXPECT_EQ("Accept-Encoding", headers.get_("vary")); + } +} + +// Filter should set Vary header value with `accept-encoding`. +TEST_F(CompressorFilterTest, NoVaryHeader) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + Http::TestResponseHeaderMapImpl headers{{":method", "get"}, {"content-length", "256"}}; + feedBuffer(256); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_TRUE(headers.has("vary")); + EXPECT_EQ("Accept-Encoding", headers.get_("vary")); +} + +// Filter should set Vary header value with `accept-encoding` and preserve other values. 
+TEST_F(CompressorFilterTest, VaryOtherValues) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + Http::TestResponseHeaderMapImpl headers{ + {":method", "get"}, {"content-length", "256"}, {"vary", "User-Agent, Cookie"}}; + feedBuffer(256); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_TRUE(headers.has("vary")); + EXPECT_EQ("User-Agent, Cookie, Accept-Encoding", headers.get_("vary")); +} + +// Vary header should have only one `accept-encoding`value. +TEST_F(CompressorFilterTest, VaryAlreadyHasAcceptEncoding) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + doRequest({{":method", "get"}, {"accept-encoding", "test"}}, true); + Http::TestResponseHeaderMapImpl headers{ + {":method", "get"}, {"content-length", "256"}, {"vary", "accept-encoding"}}; + feedBuffer(256); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->encodeHeaders(headers, false)); + EXPECT_TRUE(headers.has("vary")); + EXPECT_EQ("accept-encoding, Accept-Encoding", headers.get_("vary")); +} + +// Verify removeAcceptEncoding header. 
+TEST_F(CompressorFilterTest, RemoveAcceptEncodingHeader) { + NiceMock decoder_callbacks; + filter_->setDecoderFilterCallbacks(decoder_callbacks); + { + Http::TestRequestHeaderMapImpl headers = {{"accept-encoding", "deflate, test, gzip, br"}}; + setUpFilter(R"EOF({"remove_accept_encoding_header": true})EOF"); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->decodeHeaders(headers, true)); + EXPECT_FALSE(headers.has("accept-encoding")); + } + { + Http::TestRequestHeaderMapImpl headers = {{"accept-encoding", "deflate, test, gzip, br"}}; + setUpFilter("{}"); + EXPECT_EQ(Http::FilterHeadersStatus::Continue, filter_->decodeHeaders(headers, true)); + EXPECT_TRUE(headers.has("accept-encoding")); + EXPECT_EQ("deflate, test, gzip, br", headers.get_("accept-encoding")); + } +} + +} // namespace Compressors +} // namespace Common +} // namespace HttpFilters +} // namespace Extensions +} // namespace Envoy