This repository was archived by the owner on Mar 29, 2023 and is now read-only.

Kafka auth #510

Open
wants to merge 19 commits into base: main
13 changes: 13 additions & 0 deletions broker-jaas.conf
@@ -0,0 +1,13 @@
KafkaServer {
org.apache.kafka.common.security.plain.PlainLoginModule required
username="admin"
password="admin-secret"
user_admin="admin-secret";
};

Client {
org.apache.zookeeper.server.auth.DigestLoginModule required
username="admin"
password="admin-secret"
user_admin="admin-secret";
};
2 changes: 2 additions & 0 deletions data-collector/README.md
@@ -17,6 +17,8 @@ By default, this component will validate that hosts in the `KAFKA_BOOTSTRAP_TLS`

If your Kafka cluster uses TLS certificates issued by a private Certificate Authority, you will need to provide the CA certificate in PEM format so that certificate validation can be performed when connecting to the Kafka cluster. Do this by placing the CA certificate in the `/tls` directory of the container, typically via a volume mount.

At present, Kiln supports authentication between brokers and producers/consumers using the SASL PLAIN mechanism. Authentication is optional and is enabled by setting the `ENABLE_KAFKA_AUTH` environment variable. If this variable is set, you must also supply the username and password for authentication via the `KAFKA_SASL_AUTH_USERNAME` and `KAFKA_SASL_AUTH_PASSWORD` environment variables respectively.
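
For illustration, here is a minimal sketch of how these variables might be wired into an rdkafka `ClientConfig`; the function name, the choice of `SASL_SSL` as the security protocol, and the error handling are assumptions for this example rather than Kiln's actual implementation:

```rust
use rdkafka::config::ClientConfig;
use std::env;

// Illustrative sketch: apply the optional SASL settings described above.
// The environment variable names come from this README; everything else is assumed.
fn apply_kafka_auth(config: &mut ClientConfig) {
    if env::var("ENABLE_KAFKA_AUTH").is_ok() {
        let username = env::var("KAFKA_SASL_AUTH_USERNAME")
            .expect("KAFKA_SASL_AUTH_USERNAME must be set when ENABLE_KAFKA_AUTH is set");
        let password = env::var("KAFKA_SASL_AUTH_PASSWORD")
            .expect("KAFKA_SASL_AUTH_PASSWORD must be set when ENABLE_KAFKA_AUTH is set");
        config
            .set("security.protocol", "SASL_SSL") // assumes TLS is also enabled
            .set("sasl.mechanisms", "PLAIN")
            .set("sasl.username", username)
            .set("sasl.password", password);
    }
}
```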

## Request & Response Documentation

You shouldn't generally need to make manual requests to the data-collector; instead, prefer to use the ToolReport struct from kiln_lib and serialise it to JSON before sending it to the data-collector. If you do need to make a manual request to the data-collector, see [docs/request-response.md](docs/request-response.md).
11 changes: 6 additions & 5 deletions data-forwarder/Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion data-forwarder/Cargo.toml
@@ -8,7 +8,7 @@ kiln_lib = { git = "https://github.com/simplybusiness/Kiln", features = [ "json"
clap = "2"
chrono = "0.4"
reqwest = { version = "0.11", features = [ "blocking", "json",] }
git2 = "0.13"
git2 = "0.12"
Collaborator Author
At present, the CI/CD build for the data-forwarder fails without this change; potentially related to rust-lang/rust#85574.

uuid = { version = "0.8", features = [ "v4",] }
openssl-probe = "0.1.4"
toml = "0.5"
90 changes: 90 additions & 0 deletions docker-compose-auth.yml
@@ -0,0 +1,90 @@
version: '2'
services:
  zookeeper:
    image: wurstmeister/zookeeper
    ports:
      - "2181:2181"
    environment:
      SERVER_JVMFLAGS: -Djava.security.auth.login.config=/opt/zookeeper_jaas.conf
        -Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider
        -Dzookeeper.authProvider.2=org.apache.zookeeper.server.auth.DigestAuthenticationProvider
        -DjaasLoginRenew=3600000
        -DrequireClientAuthScheme=sasl
    volumes:
      - ./zookeeper_jaas.conf:/opt/zookeeper_jaas.conf

  kafka:
    image: wurstmeister/kafka
    depends_on:
      - zookeeper
    ports:
      - "9092:9092"
    hostname: kafka
    environment:
      KAFKA_ADVERTISED_HOSTNAME: kafka
      KAFKA_LISTENERS: "SASL_SSL://kafka:9092"
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
      KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
      KAFKA_SSL_TLS_VERSION: TLSv1.2
      KAFKA_SSL_PROTOCOL: TLSv1.2
      KAFKA_SSL_ENABLED_PROTOCOLS: TLSv1.2
      KAFKA_SSL_SECURE_RANDOM_IMPLEMENTATION: NativePRNG
      KAFKA_SSL_CIPHER_SUITES: TLS_ECDHE_ECDSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_ECDSA_WITH_AES_128_GCM_SHA256,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256
      KAFKA_SSL_KEYSTORE_LOCATION: /tls/kafka.keystore.jks
      KAFKA_SSL_KEYSTORE_PASSWORD: password
      KAFKA_SSL_KEY_PASSWORD: password
      KAFKA_SSL_TRUSTSTORE_LOCATION: /tls/kafka.truststore.jks
      KAFKA_SSL_TRUSTSTORE_PASSWORD: password
      KAFKA_CREATE_TOPICS: "ToolReports:6:1,DependencyEvents:6:1"
      KAFKA_MESSAGE_MAX_BYTES: 10000000
      KAFKA_REPLICA_FETCH_MAX_BYTES: 10000000
      KAFKA_SECURITY_PROTOCOL: SASL_SSL
      KAFKA_SASL_ENABLED_MECHANISMS: PLAIN
      KAFKA_SECURITY_INTER_BROKER_PROTOCOL: SASL_SSL
      KAFKA_SASL_MECHANISM_INTER_BROKER_PROTOCOL: PLAIN
      KAFKA_OPTS: "-Djava.security.auth.login.config=/etc/kafka/broker-jaas.conf"
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
      - ./tls:/tls
      - ./broker-jaas.conf:/etc/kafka/broker-jaas.conf
  data-collector:
    depends_on:
      - kafka
    image: kiln/data-collector:git-latest
    ports:
      - "8081:8080"
    environment:
      - KAFKA_BOOTSTRAP_TLS=kafka:9092
      - ENABLE_KAFKA_AUTH=true
      - KAFKA_SASL_AUTH_USERNAME=admin
      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
    volumes:
      - ./tls:/tls
  report-parser:
    depends_on:
      - kafka
    image: kiln/report-parser:git-latest
    environment:
      - KAFKA_BOOTSTRAP_TLS=kafka:9092
      - ENABLE_KAFKA_AUTH=true
      - KAFKA_SASL_AUTH_USERNAME=admin
      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
    volumes:
      - ./tls:/tls
  slack-connector:
    depends_on:
      - kafka
    image: kiln/slack-connector:git-latest
    environment:
      - KAFKA_BOOTSTRAP_TLS=kafka:9092
      - RUST_LOG=info
      - DISABLE_KAFKA_DOMAIN_VALIDATION=true
      - ENABLE_KAFKA_AUTH=true
      - KAFKA_SASL_AUTH_USERNAME=admin
      - KAFKA_SASL_AUTH_PASSWORD=admin-secret
      - OAUTH2_TOKEN
      - SLACK_CHANNEL_ID
    volumes:
      - ./tls:/tls
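
For reference, a client running on the same Compose network could connect to this stack with matching settings. The sketch below uses the rdkafka crate; the CA certificate path `/tls/ca.pem`, the consumer group id, and the use of `BaseConsumer` are assumptions for illustration, while the bootstrap address, credentials, and topic name mirror the values above:

```rust
use rdkafka::config::ClientConfig;
use rdkafka::consumer::{BaseConsumer, Consumer};

fn main() {
    // Connection settings mirror docker-compose-auth.yml and broker-jaas.conf;
    // the CA certificate path and group id are placeholders for this sketch.
    let consumer: BaseConsumer = ClientConfig::new()
        .set("bootstrap.servers", "kafka:9092")
        .set("group.id", "kiln-auth-smoke-test")
        .set("security.protocol", "SASL_SSL")
        .set("sasl.mechanisms", "PLAIN")
        .set("sasl.username", "admin")
        .set("sasl.password", "admin-secret")
        .set("ssl.ca.location", "/tls/ca.pem")
        .create()
        .expect("failed to create Kafka consumer");

    // ToolReports is one of the topics created via KAFKA_CREATE_TOPICS above.
    consumer
        .subscribe(&["ToolReports"])
        .expect("failed to subscribe to ToolReports");
}
```

Note that `kafka:9092` only resolves from inside the Compose network; connecting from the host would require an externally reachable advertised listener.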