Skip to content

Commit 29bd393

Browse files
committed
Added new Apache Kafka Docker image, starting from version 3.7
1 parent f4b900b commit 29bd393

File tree

63 files changed

+705
-493
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

63 files changed

+705
-493
lines changed

.env

+1
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,4 @@
1+
KAFKA_VERSION=3.7.0
12
CONFLUENT_VERSION=7.6.0
23
POSTGRES_VERSION=10.5
34
POSTGRES_ALPINE_VERSION=14.1-alpine

README.adoc

+332-219
Large diffs are not rendered by default.

acls/docker-compose.yml

+2-2
Original file line numberDiff line numberDiff line change
@@ -4,14 +4,14 @@ version: '2'
44
services:
55

66
broker:
7-
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
7+
image: apache/kafka:${KAFKA_VERSION}
88
hostname: broker
99
container_name: broker
1010
ports:
1111
- "9092:9092"
1212
volumes:
1313
- ./config/alice.properties:/tmp/alice.properties
14-
- ./config/alice.properties:/tmp/admin.properties
14+
- ./config/admin.properties:/tmp/admin.properties
1515
environment:
1616
KAFKA_NODE_ID: 1
1717
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,AUTH:SASL_PLAINTEXT,INTERNAL:PLAINTEXT
Original file line numberDiff line numberDiff line change
@@ -1,4 +1 @@
1-
bootstrap.servers=XXXXXXXXXXX
2-
security.protocol=SASL_SSL
3-
sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required username='XXXXXXXXXXX' password='XXXXXXXXXXX';
4-
sasl.mechanism=PLAIN
1+
bootstrap.servers=localhost:9092
+6
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
sasl.mechanism=PLAIN
2+
security.protocol=SASL_PLAINTEXT
3+
sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \
4+
username="kafkabroker1" \
5+
password="kafkabroker1-secret";
6+
group.id=test
+5
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,5 @@
1+
sasl.mechanism=PLAIN
2+
security.protocol=SASL_PLAINTEXT
3+
sasl.jaas.config=org.apache.kafka.common.security.plain.PlainLoginModule required \
4+
username="kafkabroker1" \
5+
password="kafkabroker1-secret";

authorizers/docker-compose.yml

+3-1
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,9 @@ services:
2323
- "9092:9092"
2424
- "9093:9093"
2525
volumes:
26-
- ./jars/authorizers-0.0.1-SNAPSHOT.jar:/usr/share/java/kafka/authorizers-0.0.1-SNAPSHOT.jar
26+
- ./jars/authorizers-1.2.1.jar:/usr/share/java/kafka/authorizers-1.2.1.jar
27+
- ./config/producer.properties:/tmp/producer.properties
28+
- ./config/consumer.properties:/tmp/consumer.properties
2729
environment:
2830
KAFKA_BROKER_ID: 1
2931
KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
-5.64 KB
Binary file not shown.
5.77 KB
Binary file not shown.

docker-compose.yml

+1-99
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ version: '2'
44
services:
55

66
broker:
7-
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
7+
image: apache/kafka:${KAFKA_VERSION}
88
hostname: broker
99
container_name: broker
1010
ports:
@@ -26,104 +26,6 @@ services:
2626
KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR
2727
CLUSTER_ID: 'QTnB2tAgTWa1ec5wYon2jg'
2828

29-
schema-registry:
30-
image: confluentinc/cp-schema-registry:${CONFLUENT_VERSION}
31-
hostname: schema-registry
32-
container_name: schema-registry
33-
depends_on:
34-
- broker
35-
ports:
36-
- "8081:8081"
37-
environment:
38-
SCHEMA_REGISTRY_HOST_NAME: schema-registry
39-
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'
40-
41-
connect:
42-
image: confluentinc/cp-kafka-connect-base:${CONFLUENT_VERSION}
43-
hostname: connect
44-
container_name: connect
45-
depends_on:
46-
- schema-registry
47-
ports:
48-
- "8083:8083"
49-
environment:
50-
CONNECT_BOOTSTRAP_SERVERS: 'broker:9092'
51-
CONNECT_REST_ADVERTISED_HOST_NAME: connect
52-
CONNECT_REST_PORT: 8083
53-
CONNECT_GROUP_ID: compose-connect-group
54-
CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
55-
CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
56-
CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
57-
CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
58-
CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
59-
CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
60-
CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
61-
CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
62-
CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
63-
CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
64-
CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
65-
CONNECT_LOG4J_LOGGERS: org.reflections=ERROR
66-
command:
67-
- bash
68-
- -c
69-
- |
70-
echo "Installing Connector"
71-
confluent-hub install --no-prompt confluentinc/kafka-connect-jdbc:10.6.4
72-
#
73-
echo "Launching Kafka Connect worker"
74-
/etc/confluent/docker/run &
75-
#
76-
sleep infinity
77-
78-
ksqldb-server:
79-
image: confluentinc/cp-ksqldb-server:${CONFLUENT_VERSION}
80-
hostname: ksqldb-server
81-
container_name: ksqldb-server
82-
depends_on:
83-
- broker
84-
- schema-registry
85-
- connect
86-
volumes:
87-
- ./extensions:/etc/ksqldb/ext
88-
ports:
89-
- "8088:8088"
90-
environment:
91-
KSQL_LISTENERS: "http://0.0.0.0:8088"
92-
KSQL_BOOTSTRAP_SERVERS: "broker:9092"
93-
KSQL_CONFIG_DIR: "/etc/ksqldb"
94-
KSQL_KSQL_EXTENSION_DIR: "/etc/ksqldb/ext/"
95-
KSQL_KSQL_SCHEMA_REGISTRY_URL: "http://schema-registry:8081"
96-
KSQL_KSQL_CONNECT_URL: "http://connect:8083"
97-
KSQL_KSQL_LOGGING_PROCESSING_STREAM_AUTO_CREATE: "true"
98-
KSQL_KSQL_LOGGING_PROCESSING_TOPIC_AUTO_CREATE: "true"
99-
KSQL_KSQL_STREAMS_AUTO_OFFSET_RESET: "earliest"
100-
KSQL_KSQL_STREAMS_CACHE_MAX_BYTES_BUFFERING: "20000000"
101-
KSQL_KSQL_LOGGING_PROCESSING_TOPIC_REPLICATION_FACTOR: 1
102-
103-
ksqldb-cli:
104-
image: confluentinc/cp-ksqldb-cli:${CONFLUENT_VERSION}
105-
container_name: ksqldb-cli
106-
depends_on:
107-
- broker
108-
- ksqldb-server
109-
entrypoint: /bin/sh
110-
tty: true
111-
112-
rest-proxy:
113-
image: confluentinc/cp-kafka-rest:${CONFLUENT_VERSION}
114-
depends_on:
115-
- broker
116-
- schema-registry
117-
ports:
118-
- "8082:8082"
119-
hostname: rest-proxy
120-
container_name: rest-proxy
121-
environment:
122-
KAFKA_REST_HOST_NAME: rest-proxy
123-
KAFKA_REST_BOOTSTRAP_SERVERS: broker:9092
124-
KAFKA_REST_LISTENERS: "http://0.0.0.0:8082"
125-
KAFKA_REST_SCHEMA_REGISTRY_URL: 'http://schema-registry:8081'
126-
12729
kcat:
12830
image: confluentinc/cp-kcat:${KCAT_VERSION}
12931
hostname: kcat

kafka-consumer/docker-compose.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ version: '2'
44
services:
55

66
broker:
7-
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
7+
image: apache/kafka:${KAFKA_VERSION}
88
hostname: broker
99
container_name: broker
1010
ports:
@@ -31,7 +31,7 @@ services:
3131
CLUSTER_ID: 'QTnB2tAgTWa1ec5wYon2jg'
3232

3333
broker2:
34-
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
34+
image: apache/kafka:${KAFKA_VERSION}
3535
hostname: broker2
3636
container_name: broker2
3737
ports:
@@ -56,7 +56,7 @@ services:
5656
CLUSTER_ID: 'QTnB2tAgTWa1ec5wYon2jg'
5757

5858
broker3:
59-
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
59+
image: apache/kafka:${KAFKA_VERSION}
6060
hostname: broker3
6161
container_name: broker3
6262
ports:

kafka-consumer/src/main/java/org/hifly/kafka/demo/consumer/core/Runner.java

+7-5
Original file line numberDiff line numberDiff line change
@@ -10,22 +10,24 @@ public class Runner {
1010
public static void main (String [] args) {
1111
String topics = null;
1212
String partitionStrategy = "org.apache.kafka.clients.consumer.RangeAssignor";
13+
String groupId = "group-1";
1314
if(args != null && args.length >= 1) {
1415
topics = args[0];
1516
}
1617
if(args.length == 2) {
1718
partitionStrategy = args[1];
1819
}
19-
pollAutoCommit(topics, partitionStrategy);
20+
if(args.length == 3) {
21+
groupId = args[2];
22+
}
23+
pollAutoCommit(topics, partitionStrategy, groupId);
2024
}
2125

22-
private static void pollAutoCommit(String topics, String partitionStrategy) {
23-
UUID uuid = UUID. randomUUID();
24-
String uuidAsString = uuid. toString();
26+
private static void pollAutoCommit(String topics, String partitionStrategy, String groupId) {
2527

2628
new ConsumerInstance<String , String>(
2729
UUID.randomUUID().toString(),
28-
uuidAsString,
30+
groupId,
2931
topics == null? "topic1": topics,
3032
StringDeserializer.class.getName(),
3133
StringDeserializer.class.getName(),

kafka-microprofile2-consumer/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,7 @@
1717
<dependency>
1818
<groupId>org.apache.kafka</groupId>
1919
<artifactId>kafka_2.12</artifactId>
20-
<version>3.6.0</version>
20+
<version>3.7.0</version>
2121
</dependency>
2222

2323
<dependency>

kafka-microprofile2-producer/pom.xml

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
<dependency>
1717
<groupId>org.apache.kafka</groupId>
1818
<artifactId>kafka_2.12</artifactId>
19-
<version>3.6.0</version>
19+
<version>3.7.0</version>
2020
</dependency>
2121

2222
<dependency>

kafka-orders-tx/src/main/java/org/hifly/kafka/demo/orders/controller/ItemController.java

+11-5
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,10 @@
11
package org.hifly.kafka.demo.orders.controller;
22

3+
import org.apache.kafka.clients.producer.KafkaProducer;
4+
import org.apache.kafka.common.serialization.StringSerializer;
35
import org.hifly.kafka.demo.orders.kafka.KafkaConfig;
6+
import org.hifly.kafka.demo.orders.kafka.producer.ItemJsonSerializer;
7+
import org.hifly.kafka.demo.orders.kafka.producer.OrderJsonSerializer;
48
import org.hifly.kafka.demo.orders.model.Item;
59
import org.hifly.kafka.demo.orders.model.Order;
610
import org.hifly.kafka.demo.producer.serializer.json.JsonProducer;
@@ -30,12 +34,11 @@ public List<RecordMetadata> sendItems(List<Item> items, int delay) {
3034

3135
final String TOPIC = "items";
3236

33-
Properties properties = new Properties();
34-
properties.put("valueSerializer", "org.hifly.kafka.demo.orders.kafka.producer.ItemJsonSerializer");
35-
properties.put("txId", "cart-app");
37+
Properties props = KafkaConfig.jsonProducer(ItemJsonSerializer.class.getName(), "cart-app");
38+
KafkaProducer<String, Item> producer = new KafkaProducer<>(props);
3639

3740
JsonProducer<Item> jsonProducer = new JsonProducer<>();
38-
jsonProducer.start();
41+
jsonProducer.start(producer);
3942

4043
jsonProducer.getProducer().initTransactions();
4144

@@ -66,8 +69,11 @@ public void generateOrders(
6669

6770
String consGroup = CONS_GROUP;
6871

72+
Properties props = KafkaConfig.jsonProducer(OrderJsonSerializer.class.getName(), "cart-app");
73+
KafkaProducer<String, Order> producer = new KafkaProducer<>(props);
74+
6975
JsonProducer<Order> jsonProducer = new JsonProducer<>();
70-
jsonProducer.start();
76+
jsonProducer.start(producer);
7177

7278
if(generateConsGroup)
7379
consGroup = UUID.randomUUID().toString();

kafka-producer/docker-compose-apicurio.yml

+2
Original file line numberDiff line numberDiff line change
@@ -51,6 +51,8 @@ services:
5151
container_name: apicurio
5252
ports:
5353
- 8080:8080
54+
depends_on:
55+
- postgres
5456
environment:
5557
REGISTRY_DATASOURCE_URL: 'jdbc:postgresql://postgres:5432/postgres'
5658
REGISTRY_DATASOURCE_USERNAME: postgres
+39
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,39 @@
1+
---
2+
version: '2'
3+
4+
services:
5+
6+
broker:
7+
image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
8+
hostname: broker
9+
container_name: broker
10+
ports:
11+
- "9092:9092"
12+
environment:
13+
KAFKA_NODE_ID: 1
14+
KAFKA_PROCESS_ROLES: 'broker,controller'
15+
KAFKA_CONTROLLER_QUORUM_VOTERS: '1@broker:29093'
16+
KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: 'PLAINTEXT:PLAINTEXT,CONTROLLER:PLAINTEXT'
17+
KAFKA_ADVERTISED_LISTENERS: 'PLAINTEXT://broker:9092'
18+
KAFKA_LISTENERS: 'PLAINTEXT://broker:9092,CONTROLLER://broker:29093'
19+
KAFKA_INTER_BROKER_LISTENER_NAME: 'PLAINTEXT'
20+
KAFKA_CONTROLLER_LISTENER_NAMES: 'CONTROLLER'
21+
KAFKA_LOG_DIRS: '/tmp/kraft-combined-logs'
22+
KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
23+
KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
24+
KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
25+
KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
26+
KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR
27+
CLUSTER_ID: 'QTnB2tAgTWa1ec5wYon2jg'
28+
29+
schema-registry:
30+
image: confluentinc/cp-schema-registry:${CONFLUENT_VERSION}
31+
hostname: schema-registry
32+
container_name: schema-registry
33+
depends_on:
34+
- broker
35+
ports:
36+
- "8081:8081"
37+
environment:
38+
SCHEMA_REGISTRY_HOST_NAME: schema-registry
39+
SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'

kafka-producer/src/main/java/org/hifly/kafka/demo/producer/KafkaConfig.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
public class KafkaConfig {
1515

1616
private static final String BROKER_LIST =
17-
System.getenv("kafka.broker.list") != null? System.getenv("kafka.broker.list") :"localhost:12091,localhost:9093,localhost:9094";
17+
System.getenv("kafka.broker.list") != null? System.getenv("kafka.broker.list") :"localhost:9092,localhost:9093,localhost:9094";
1818
private static final String CONFLUENT_SCHEMA_REGISTRY_URL =
1919
System.getenv("confluent.schema.registry") != null? System.getenv("confluent.schema.registry"):"http://localhost:8081";
2020
private static final String APICURIO_SCHEMA_REGISTRY_URL =

kafka-producer/src/main/java/org/hifly/kafka/demo/producer/partitioner/custom/Runner.java

+1-1
Original file line numberDiff line numberDiff line change
@@ -31,7 +31,7 @@ public static void main (String [] args) {
3131
public static void bunchOfSynchMessages(String topic, StringProducer baseProducer) {
3232
Random random = new Random();
3333
RecordMetadata lastRecord;
34-
for (int i= 10; i < 100; i++ ) {
34+
for (int i= 0; i < 10; i++ ) {
3535
String key = strings[random.nextInt(strings.length)];
3636
lastRecord = baseProducer.produceSync(new ProducerRecord<>(topic, key, Integer.toString(i)));
3737
LOGGER.info("Key to send: {}\n", key);

kafka-producer/src/main/java/org/hifly/kafka/demo/producer/serializer/json/JsonProducer.java

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
package org.hifly.kafka.demo.producer.serializer.json;
22

3+
import org.apache.kafka.common.serialization.StringSerializer;
34
import org.hifly.kafka.demo.producer.AbstractKafkaProducer;
45
import org.hifly.kafka.demo.producer.ProducerCallback;
56
import org.hifly.kafka.demo.producer.KafkaConfig;
@@ -14,7 +15,7 @@
1415

1516
public class JsonProducer<T> extends AbstractKafkaProducer<String, T> implements IKafkaProducer<String, T> {
1617

17-
private String valueSerializer;
18+
private String valueSerializer = StringSerializer.class.getName();
1819

1920
public JsonProducer() {}
2021

kafka-producer/src/main/java/org/hifly/kafka/demo/producer/serializer/json/Runner.java

+3-3
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@ public static void main (String [] args) {
2020

2121
public static void bunchOfMessages(String topic, JsonProducer baseProducer) {
2222
RecordMetadata lastRecord = null;
23-
for (int i= 10; i < 100; i++ ) {
23+
for (int i= 0; i < 10; i++ ) {
2424
lastRecord = baseProducer.produceSync(new ProducerRecord<>(topic, new CustomData(i)));
2525
RecordMetadataUtil.prettyPrinter(lastRecord);
2626
}
@@ -29,13 +29,13 @@ public static void bunchOfMessages(String topic, JsonProducer baseProducer) {
2929
}
3030

3131
public static void bunchOfFFMessages(String topic, JsonProducer baseProducer) {
32-
for (int i= 10; i < 100; i++ )
32+
for (int i= 0; i < 10; i++ )
3333
baseProducer.produceFireAndForget(new ProducerRecord<>(topic, new CustomData(i)));
3434
baseProducer.stop();
3535
}
3636

3737
public static void bunchOfAsynchMessages(String topic, JsonProducer baseProducer) {
38-
for (int i= 10; i < 100; i++ )
38+
for (int i= 0; i < 10; i++ )
3939
baseProducer.produceAsync(new ProducerRecord<>(topic, new CustomData(i)), new ProducerCallback());
4040
baseProducer.stop();
4141
}

0 commit comments

Comments
 (0)