diff --git a/bazel/repository_locations.bzl b/bazel/repository_locations.bzl
index 89485043ef4e4..3fcb4b96c136e 100644
--- a/bazel/repository_locations.bzl
+++ b/bazel/repository_locations.bzl
@@ -949,45 +949,45 @@ REPOSITORY_LOCATIONS_SPEC = dict(
project_name = "Kafka (source)",
project_desc = "Open-source distributed event streaming platform",
project_url = "https://kafka.apache.org",
- version = "2.8.1",
- sha256 = "c3fd89257e056e11b5e1b09d4bbd8332ce5abfdfa7c7a5bb6a5cfe9860fcc688",
+ version = "3.0.0",
+ sha256 = "862526ee07c372d7b2f7e672c096fe84bb1e115ef536e0ad7497e6fb50e08e02",
strip_prefix = "kafka-{version}/clients/src/main/resources/common/message",
urls = ["https://github.com/apache/kafka/archive/{version}.zip"],
use_category = ["dataplane_ext"],
extensions = ["envoy.filters.network.kafka_broker", "envoy.filters.network.kafka_mesh"],
- release_date = "2021-09-14",
+ release_date = "2021-09-08",
cpe = "cpe:2.3:a:apache:kafka:*",
),
edenhill_librdkafka = dict(
project_name = "Kafka (C/C++ client)",
project_desc = "C/C++ client for Apache Kafka (open-source distributed event streaming platform)",
project_url = "https://github.com/edenhill/librdkafka",
- version = "1.7.0",
- sha256 = "c71b8c5ff419da80c31bb8d3036a408c87ad523e0c7588e7660ee5f3c8973057",
+ version = "1.8.2",
+ sha256 = "6a747d293a7a4613bd2897e28e8791476fbe1ae7361f2530a876e0fd483482a6",
strip_prefix = "librdkafka-{version}",
urls = ["https://github.com/edenhill/librdkafka/archive/v{version}.tar.gz"],
use_category = ["dataplane_ext"],
extensions = ["envoy.filters.network.kafka_mesh"],
- release_date = "2021-05-10",
+ release_date = "2021-10-18",
cpe = "N/A",
),
kafka_server_binary = dict(
project_name = "Kafka (server binary)",
project_desc = "Open-source distributed event streaming platform",
project_url = "https://kafka.apache.org",
- version = "2.8.1",
- sha256 = "4888b03e3b27dd94f2d830ce3bae9d7d98b0ccee3a5d30c919ccb60e0fa1f139",
+ version = "3.0.0",
+ sha256 = "a82728166bbccf406009747a25e1fe52dbcb4d575e4a7a8616429b5818cd02d1",
strip_prefix = "kafka_2.13-{version}",
urls = ["https://archive.apache.org/dist/kafka/{version}/kafka_2.13-{version}.tgz"],
- release_date = "2021-09-14",
+ release_date = "2021-09-20",
use_category = ["test_only"],
),
kafka_python_client = dict(
project_name = "Kafka (Python client)",
project_desc = "Open-source distributed event streaming platform",
project_url = "https://kafka.apache.org",
- version = "2.0.1",
- sha256 = "05f7c6eecb402f11fcb7e524c903f1ba1c38d3bdc9bf42bc8ec3cf7567b9f979",
+ version = "2.0.2",
+ sha256 = "5dcf87c559e7aee4f18d621a02e247db3e3552ee4589ca611d51eef87b37efed",
strip_prefix = "kafka-python-{version}",
urls = ["https://github.com/dpkp/kafka-python/archive/{version}.tar.gz"],
release_date = "2020-09-30",
diff --git a/contrib/kafka/filters/network/source/protocol/generator.py b/contrib/kafka/filters/network/source/protocol/generator.py
index 2fd18ebc2d69b..05ab8d0599669 100755
--- a/contrib/kafka/filters/network/source/protocol/generator.py
+++ b/contrib/kafka/filters/network/source/protocol/generator.py
@@ -126,12 +126,7 @@ def parse_messages(self, input_files):
r'^\s*$', '', without_comments, flags=re.MULTILINE)
# Windows support: see PR 10542 for details.
amended = re.sub(r'-2147483648', 'INT32_MIN', without_empty_newlines)
- # Kafka JSON files are malformed. See KAFKA-12794.
- if input_file == 'external/kafka_source/DescribeProducersRequest.json':
- amended = amended[:-6]
message_spec = json.loads(amended)
- # Adopt publicly available messages only:
- # https://kafka.apache.org/28/protocol.html#protocol_api_keys
api_key = message_spec['apiKey']
if api_key <= 51 or api_key in [56, 57, 60, 61]:
message = self.parse_top_level_element(message_spec)
diff --git a/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst b/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst
index ec7828db5c120..8608fd3669162 100644
--- a/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst
+++ b/docs/root/configuration/listeners/network_filters/kafka_broker_filter.rst
@@ -5,8 +5,8 @@ Kafka Broker filter
The Apache Kafka broker filter decodes the client protocol for
`Apache Kafka <https://kafka.apache.org/>`_, both the requests and responses in the payload.
-The message versions in `Kafka 2.8.1 <https://kafka.apache.org/28/protocol.html>`_
-are supported.
+The message versions in `Kafka 3.0.0 <https://kafka.apache.org/30/protocol.html>`_
+are supported (apart from API keys 65-67 which were introduced recently).
The filter attempts not to influence the communication between client and brokers, so the messages
that could not be decoded (due to Kafka client or broker running a newer version than supported by
this filter) are forwarded as-is.
diff --git a/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst b/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst
index bebb7c31aa5bf..2ad5bd2c2924e 100644
--- a/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst
+++ b/docs/root/configuration/listeners/network_filters/kafka_mesh_filter.rst
@@ -6,7 +6,7 @@ Kafka Mesh filter
The Apache Kafka mesh filter provides a facade for `Apache Kafka <https://kafka.apache.org/>`_
producers. Produce requests sent to this filter insance can be forwarded to one of multiple
clusters, depending on configured forwarding rules. Corresponding message versions from
-Kafka 2.8.1 are supported.
+Kafka 3.0.0 are supported.
* :ref:`v3 API reference `
* This filter should be configured with the name *envoy.filters.network.kafka_mesh*.