Skip to content

Commit e0dbc81

Browse files
committed
removed non-Maven modules
1 parent b1a9b0e commit e0dbc81

File tree

9 files changed

+178
-149
lines changed

9 files changed

+178
-149
lines changed
Original file line numberDiff line numberDiff line change
{
  "name": "mongo-sample-sink",
  "config": {
    "connector.class": "com.mongodb.kafka.connect.MongoSinkConnector",
    "topics": "test",
    "connection.uri": "mongodb://admin:password@mongo:27017",
    "key.converter": "org.apache.kafka.connect.storage.StringConverter",
    "value.converter": "org.apache.kafka.connect.json.JsonConverter",
    "key.converter.schemas.enable": false,
    "value.converter.schemas.enable": false,
    "database": "Tutorial2",
    "collection": "pets",
    "errors.tolerance": "all",
    "errors.deadletterqueue.topic.name": "dlq.mongo",
    "errors.deadletterqueue.topic.replication.factor": 1
  }
}

ksqldb-join/docker-compose.yml

+91
Original file line numberDiff line numberDiff line change
---
# Single-node Kafka stack (ZooKeeper, broker, Schema Registry, Kafka Connect)
# plus MongoDB, backing the Kafka Connect MongoDB sink example.
# NOTE(review): the ksql/ scripts in this folder POST to
# http://ksqldb-server:8088, but no ksqldb-server service is defined here —
# confirm it is started elsewhere.
version: '2'

services:
  zookeeper:
    image: confluentinc/cp-zookeeper:${CONFLUENT_VERSION}
    hostname: zookeeper
    container_name: zookeeper
    ports:
      - "2181:2181"
    environment:
      ZOOKEEPER_CLIENT_PORT: 2181
      ZOOKEEPER_TICK_TIME: 2000

  broker:
    image: confluentinc/cp-kafka:${CONFLUENT_VERSION}
    hostname: broker
    container_name: broker
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
      - "29092:29092"  # host-facing listener, see PLAINTEXT_HOST below
    environment:
      KAFKA_BROKER_ID: 1
      KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
      # PLAINTEXT serves clients inside the compose network (broker:9092);
      # PLAINTEXT_HOST serves clients on the docker host (localhost:29092).
      # Previously PLAINTEXT_HOST was declared in the listener map but never
      # advertised, so host clients connecting to the published 9092 port
      # were redirected to the unresolvable hostname 'broker'.
      KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:9092,PLAINTEXT_HOST://localhost:29092
      KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
      KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
      KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
      KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
      KAFKA_TOOLS_LOG4J_LOGLEVEL: ERROR

  schema-registry:
    image: confluentinc/cp-schema-registry:${CONFLUENT_VERSION}
    hostname: schema-registry
    container_name: schema-registry
    depends_on:
      - broker
    ports:
      - "8081:8081"
    environment:
      SCHEMA_REGISTRY_HOST_NAME: schema-registry
      SCHEMA_REGISTRY_KAFKASTORE_BOOTSTRAP_SERVERS: 'broker:9092'

  connect:
    image: connect-custom-smt-image:1.0.0
    hostname: connect
    container_name: connect
    depends_on:
      - schema-registry
    ports:
      - "8083:8083"
    environment:
      CONNECT_BOOTSTRAP_SERVERS: 'broker:9092'
      CONNECT_REST_ADVERTISED_HOST_NAME: connect
      CONNECT_REST_PORT: 8083
      CONNECT_GROUP_ID: compose-connect-group
      CONNECT_CONFIG_STORAGE_TOPIC: docker-connect-configs
      CONNECT_CONFIG_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_OFFSET_FLUSH_INTERVAL_MS: 10000
      CONNECT_OFFSET_STORAGE_TOPIC: docker-connect-offsets
      CONNECT_OFFSET_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_STATUS_STORAGE_TOPIC: docker-connect-status
      CONNECT_STATUS_STORAGE_REPLICATION_FACTOR: 1
      CONNECT_KEY_CONVERTER: org.apache.kafka.connect.storage.StringConverter
      CONNECT_VALUE_CONVERTER: io.confluent.connect.avro.AvroConverter
      CONNECT_VALUE_CONVERTER_SCHEMA_REGISTRY_URL: http://schema-registry:8081
      # NOTE(review): modern Connect workers ignore the ZooKeeper setting
      # (they only talk to the broker); kept for compatibility — confirm
      # before removing.
      CONNECT_ZOOKEEPER_CONNECT: 'zookeeper:2181'
      CONNECT_PLUGIN_PATH: "/usr/share/java,/usr/share/confluent-hub-components"
      CONNECT_LOG4J_LOGGERS: org.apache.zookeeper=ERROR,org.I0Itec.zkclient=ERROR,org.reflections=ERROR
    # Install the MongoDB sink connector plugin, then start the worker in
    # the background and keep the container alive.
    command:
      - bash
      - -c
      - |
        echo "Installing Connector"
        confluent-hub install --no-prompt mongodb/kafka-connect-mongodb:1.9.1
        #
        echo "Launching Kafka Connect worker"
        /etc/confluent/docker/run &
        #
        sleep infinity

  mongodb:
    image: mongo:5.0
    container_name: mongo
    ports:
      - "27017:27017"  # quoted: port mappings should always be YAML strings
    environment:
      - MONGO_INITDB_ROOT_USERNAME=admin
      - MONGO_INITDB_ROOT_PASSWORD=password

ksqldb-join/ksql/insert.sql

+25
Original file line numberDiff line numberDiff line change
-- Heartbeat samples for person MGG1, one reading every 5 seconds.

-- First minute (15:10:00–15:10:55): readings briefly exceed 120.
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 98, '2023-02-18 15:10:00');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 97, '2023-02-18 15:10:05');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 103, '2023-02-18 15:10:10');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 102, '2023-02-18 15:10:15');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 110, '2023-02-18 15:10:20');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 122, '2023-02-18 15:10:25');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 125, '2023-02-18 15:10:30');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 121, '2023-02-18 15:10:35');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 118, '2023-02-18 15:10:40');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 98, '2023-02-18 15:10:45');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 98, '2023-02-18 15:10:50');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 93, '2023-02-18 15:10:55');

-- Later minute (15:15:00–15:15:55): readings mostly stay above 120.
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 111, '2023-02-18 15:15:00');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 117, '2023-02-18 15:15:05');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 125, '2023-02-18 15:15:10');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 127, '2023-02-18 15:15:15');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 128, '2023-02-18 15:15:20');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 130, '2023-02-18 15:15:25');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 131, '2023-02-18 15:15:30');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 125, '2023-02-18 15:15:35');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 127, '2023-02-18 15:15:40');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 128, '2023-02-18 15:15:45');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 129, '2023-02-18 15:15:50');
INSERT INTO heartbeat (person_id, heartbeat_value, timestamp) VALUES ('MGG1', 128, '2023-02-18 15:15:55');

ksqldb-join/ksql/ksql-insert.sh

+11
Original file line numberDiff line numberDiff line change
#!/bin/bash
# Split insert.sql into individual statements (one per ';') and submit each
# to the ksqlDB REST API, pretty-printing every response with jq.
set -euo pipefail

# Endpoint is overridable so the script also works outside the compose network.
KSQL_ENDPOINT="${KSQL_ENDPOINT:-http://ksqldb-server:8088/ksql}"

tr '\n' ' ' < insert.sql | \
sed 's/;/;\'$'\n''/g' | \
while IFS= read -r stmt; do
  # Skip blank fragments (e.g. the remainder after the final ';').
  if [[ -z "${stmt//[[:space:]]/}" ]]; then
    continue
  fi
  # jq --arg JSON-escapes the statement. The previous unquoted
  # echo '{"ksql":"'$stmt'"...}' broke on embedded double quotes, performed
  # word splitting, and could glob-expand characters such as '*'.
  jq -n --arg ksql "$stmt" '{ksql: $ksql, streamsProperties: {}}' | \
  curl -s -X "POST" "$KSQL_ENDPOINT" \
    -H "Content-Type: application/vnd.ksql.v1+json; charset=utf-8" \
    -d @- | \
  jq
done

ksqldb-join/ksql/ksql-statements.sh

+11
Original file line numberDiff line numberDiff line change
#!/bin/bash
# Split statements.sql into individual statements (one per ';') and submit
# each to the ksqlDB REST API, pretty-printing every response with jq.
set -euo pipefail

# Endpoint is overridable so the script also works outside the compose network.
KSQL_ENDPOINT="${KSQL_ENDPOINT:-http://ksqldb-server:8088/ksql}"

tr '\n' ' ' < statements.sql | \
sed 's/;/;\'$'\n''/g' | \
while IFS= read -r stmt; do
  # Skip blank fragments (e.g. the remainder after the final ';').
  if [[ -z "${stmt//[[:space:]]/}" ]]; then
    continue
  fi
  # jq --arg JSON-escapes the statement. The previous unquoted
  # echo '{"ksql":"'$stmt'"...}' broke on embedded double quotes, performed
  # word splitting, and could glob-expand 'COUNT(*)' from statements.sql.
  jq -n --arg ksql "$stmt" '{ksql: $ksql, streamsProperties: {}}' | \
  curl -s -X "POST" "$KSQL_ENDPOINT" \
    -H "Content-Type: application/vnd.ksql.v1+json; charset=utf-8" \
    -d @- | \
  jq
done

ksqldb-join/ksql/statements.sql

+23
Original file line numberDiff line numberDiff line change
-- Source stream of raw heartbeat events. Event time is taken from the
-- message's own 'timestamp' column (parsed with the given format) rather
-- than the Kafka record timestamp.
CREATE STREAM heartbeat (
    person_id VARCHAR,
    heartbeat_value DOUBLE,
    timestamp VARCHAR
) WITH (
    kafka_topic = 'heartbeat',
    timestamp = 'timestamp',
    timestamp_format = 'yyyy-MM-dd HH:mm:ss',
    partitions = 1,
    value_format = 'avro'
);

-- Per-person count of readings above 120, aggregated over 1-minute
-- tumbling windows, materialized to the 'heartbeat_60sec' topic.
CREATE TABLE heartbeat_60sec
    WITH (kafka_topic = 'heartbeat_60sec') AS
    SELECT person_id,
           COUNT(*) AS beat_over_threshold_count,
           WINDOWSTART AS window_start,
           WINDOWEND AS window_end
    FROM heartbeat
    WINDOW TUMBLING (SIZE 1 MINUTES)
    WHERE heartbeat_value > 120
    GROUP BY person_id;

ksqldb-window-session-tripsegments/pom.xml

-73
This file was deleted.

ksqldb-window-tumbling-heartbeat/pom.xml

-73
This file was deleted.

pom.xml

-3
Original file line numberDiff line numberDiff line change
@@ -19,11 +19,8 @@
1919
<module>kafka-springboot-consumer</module>
2020
<module>kafka-quarkus</module>
2121
<module>kafka-smt-custom</module>
22-
<module>kafka-connect-sink-dlq</module>
2322
<module>kafka-unixcommand-connector</module>
2423
<module>ksqldb-saga-example</module>
25-
<module>ksqldb-window-tumbling-heartbeat</module>
26-
<module>ksqldb-window-session-tripsegments</module>
2724
<module>admin-client</module>
2825
<module>authorizers</module>
2926
<module>interceptors</module>

0 commit comments

Comments
 (0)