
Commit 07b3718

chore: give example and ref on using ES env that requires basic auth (#489)

* chore: rm incomplete example
* chore: add basic auth example
* chore: add auth support
* docs: add ref to basic auth example
* Update installation.md

1 parent: fb7bd32

10 files changed: +393 -136 lines
Two binary files deleted (-3.37 KB and -2.47 KB, contents not shown).
Diff for: deployments/docker/conf/logstash/conf-auth.d/80_siem.conf

+190

@@ -0,0 +1,190 @@
#########################################
# From Dsiem plugins                    #
#########################################

filter {
  if [@metadata][siem_data_type] == "normalizedEvent" {
    uuid {
      target => "event_id"
      overwrite => true
    }
  }
}

output {
  if [@metadata][siem_data_type] == "normalizedEvent" {
    # to dsiem
    http {
      format => "json"
      http_method => "post"
      url => "http://dsiem:8080/events"
    }
    # to elasticsearch
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "siem_events-%{+YYYY.MM.dd}"
      document_id => "%{[event_id]}"
      action => "index"
      template => "/etc/logstash/index-template.d/siem_events-template.json"
      template_name => "siem_events"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }
  }
}
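The user and password settings above are filled in by Logstash's ${VAR} substitution from the ELASTICSEARCH_USERNAME and ELASTICSEARCH_PASSWORD variables, which the compose file later in this commit exports into the Logstash container. If keeping credentials out of the environment is preferred, the same ${...} references can also resolve from the Logstash secrets keystore. A minimal sketch, run from the Logstash installation directory (or inside the container):

bin/logstash-keystore create
bin/logstash-keystore add ELASTICSEARCH_USERNAME
bin/logstash-keystore add ELASTICSEARCH_PASSWORD

Each add prompts for its value interactively, so nothing sensitive lands in shell history.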

#########################################
# From Dsiem's Filebeat                 #
#########################################

filter {
  if [siem_data_type] == "alarm_events" {
    mutate {
      add_field => {
        "[@metadata][siem_data_type]" => "alarm_events"
      }
    }
    prune {
      whitelist_names => [ "@metadata", "@timestamp", "alarm_id", "event_id", "stage" ]
    }
  }

  if [siem_data_type] == "alarms" {
    date {
      match => [ "created_time", "UNIX" ]
      target => "timestamp"
    }
    date {
      match => [ "update_time", "UNIX" ]
      target => "updated_time"
    }
    mutate {
      add_field => {
        "[@metadata][alarm_id]" => "%{[alarm_id]}"
        "[@metadata][siem_data_type]" => "alarms"
      }
    }

    # set target_index to the actual index for an existing ID (perm_index).
    # the lookup is done against the siem_alarms_id_lookup alias, which is assigned to all
    # new indices by default. This alias can then be managed separately to cover, for
    # example, only the last 3 indices.

    elasticsearch {
      hosts => ["elasticsearch:9200"]
      index => "siem_alarms_id_lookup"
      query => "_id:%{[alarm_id]}"
      fields => {
        "perm_index" => "[@metadata][target_index]"
      }
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }

    # if the previous step failed or found no match (i.e. this is a new ID), use today's date
    if ![@metadata][target_index] {
      mutate {
        add_field => {
          "[@metadata][target_index]" => "siem_alarms-%{+YYYY.MM.dd}"
        }
      }
    }

    # the elasticsearch filter plugin only searches within _source, so the following extra
    # perm_index field is necessary
    mutate {
      add_field => {
        "perm_index" => "%{[@metadata][target_index]}"
      }
    }
    prune {
      whitelist_names => [ "timestamp", "@metadata", "title", "status", "kingdom", "category",
        "updated_time", "risk", "risk_class", "tag$", "src_ips", "dst_ips", "intel_hits", "vulnerabilities",
        "networks", "rules", "custom_data", "^perm_index$" ]
    }

    # debugging only:
    # mutate { add_field => { "alarm_id" => "%{[@metadata][alarm_id]}" }}
    # ruby { code => 'logger.info("Dsiem alarm processing: ready to output ID ", "value" => event.get("[@metadata][alarm_id]"))' }
  }
}
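As the lookup comment above notes, new alarm indices carry the siem_alarms_id_lookup alias by default, and the alias can be trimmed so the per-event ID lookup only touches recent indices. A hedged sketch of dropping older indices from the alias by hand via Elasticsearch's _aliases API; the index pattern here is purely illustrative:

curl -u "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" -X POST "http://localhost:9200/_aliases" \
  -H "Content-Type: application/json" \
  -d '{ "actions": [ { "remove": { "index": "siem_alarms-2021.01.*", "alias": "siem_alarms_id_lookup" } } ] }'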

output {
  if [@metadata][siem_data_type] == "alarm_events" {
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "siem_alarm_events-%{+YYYY.MM.dd}"
      template => "/etc/logstash/index-template.d/siem_alarm_events-template.json"
      template_name => "siem_alarm_events"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }
  }

  # This one uses the update action and doc_as_upsert to allow partial updates
  if [@metadata][siem_data_type] == "alarms" {

    # debugging only:
    # elasticsearch { hosts => "elasticsearch:9200" index => "siem_alarms_debug" }

    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "%{[@metadata][target_index]}"
      document_id => "%{[@metadata][alarm_id]}"
      template => "/etc/logstash/index-template.d/siem_alarms-template.json"
      template_name => "siem_alarms"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
      action => "update"
      # use doc_as_upsert and a script so that:
      # - an incoming doc is automatically indexed when document_id doesn't yet exist
      # - for existing docs, we can selectively discard out-of-order updates and status/tag
      #   updates, without having to use external versioning
      doc_as_upsert => true
      script_lang => "painless"
      script_type => "inline"
      # a lower risk value on an incoming update means it's out of order;
      # the same goes for updated_time, but that should only be checked when the incoming
      # update doesn't have a higher risk
      script => '
        int incoming_risk = params.event.get("risk");
        int existing_risk = ctx._source.risk;

        if (incoming_risk < existing_risk) {
          ctx.op = "none";
          return
        } else if (incoming_risk == existing_risk) {
          ZonedDateTime old_tm = ZonedDateTime.parse(ctx._source.updated_time);
          ZonedDateTime new_tm = ZonedDateTime.parse(params.event.get("updated_time"));
          if (new_tm.isBefore(old_tm)) {
            ctx.op = "none";
            return
          }
        }
        ctx._source.timestamp = params.event.get("timestamp");
        ctx._source.updated_time = params.event.get("updated_time");
        ctx._source.risk = incoming_risk;
        ctx._source.risk_class = params.event.get("risk_class");
        ctx._source.src_ips = params.event.get("src_ips");
        ctx._source.dst_ips = params.event.get("dst_ips");
        ctx._source.rules = params.event.get("rules");
        ctx._source.networks = params.event.get("networks");

        if (params.event.get("intel_hits") != null) {
          ctx._source.intel_hits = params.event.get("intel_hits")
        }

        if (params.event.get("vulnerabilities") != null) {
          ctx._source.vulnerabilities = params.event.get("vulnerabilities")
        }

        if (params.event.get("custom_data") != null) {
          ctx._source.custom_data = params.event.get("custom_data")
        }
      '
      retry_on_conflict => 5
    }
  }
}
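For troubleshooting this upsert flow end to end, the ID lookup performed by the filter section can be reproduced by hand; the alarm ID below is a placeholder for a real one taken from Kibana or the Dsiem UI:

curl -u "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" \
  "http://localhost:9200/siem_alarms_id_lookup/_search?q=_id:example-alarm-id&pretty"

The perm_index field of the hit is what the filter copies into [@metadata][target_index]; no hit means the alarm is new and falls through to today's siem_alarms-%{+YYYY.MM.dd} index.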
Diff for: deployments/docker/conf/logstash/conf-auth.d/99_output.conf

+13

@@ -0,0 +1,13 @@
output {
  if [application] == "suricata" {
    elasticsearch {
      hosts => ["elasticsearch:9200"]
      index => "suricata-%{+YYYY.MM.dd}"
      template => "/etc/logstash/index-template.d/suricata-template.json"
      template_name => "suricata"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }
  }
}
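Once events are flowing, a quick hedged check that this output (and its basic-auth credentials) is working is to list the daily indices it creates:

curl -u "$ELASTICSEARCH_USERNAME:$ELASTICSEARCH_PASSWORD" "http://localhost:9200/_cat/indices/suricata-*?v"

A 401 here points at the credentials; an empty list points at the pipeline upstream of this output.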

Diff for: deployments/docker/docker-compose-basic-auth.yml

+142
@@ -0,0 +1,142 @@
# this requires ES_USERNAME and ES_PASSWORD (plus PROMISC_INTERFACE for suricata) to be
# set in the environment. You can set them in the .env file in the same directory as this
# compose file, or just export them in the shell before running docker-compose up:
#
# export ES_USERNAME=elastic
# export ES_PASSWORD=changeme
# export PROMISC_INTERFACE=eth0
# docker-compose -f docker-compose-basic-auth.yml up

version: "3"
services:

  elasticsearch:
    container_name: elasticsearch
    image: docker.elastic.co/elasticsearch/elasticsearch:7.11.0
    environment:
      - discovery.type=single-node
      - "ES_JAVA_OPTS=-Xms256m -Xmx256m"
      - cluster.routing.allocation.disk.threshold_enabled=false
      - xpack.monitoring.enabled=false
      - xpack.ml.enabled=false
      - xpack.graph.enabled=false
      - xpack.watcher.enabled=false
      - xpack.security.enabled=true
      - ELASTIC_PASSWORD=${ES_PASSWORD}
      - http.cors.enabled=true
      - http.cors.allow-credentials=true
      - http.cors.allow-headers=Content-Type,Content-Length,Authorization
      - http.cors.allow-origin=/https?:\/\/localhost(:[0-9]+)?/
    ports:
      - 9200:9200
    networks:
      - siemnet
    volumes:
      - es-data:/usr/share/elasticsearch/data

  logstash:
    container_name: logstash
    image: defenxor/docker-logstash:7.11.0
    command:
      - -f/etc/logstash/conf.d
    environment:
      - xpack.monitoring.enabled=false
      - ELASTICSEARCH_USERNAME=${ES_USERNAME}
      - ELASTICSEARCH_PASSWORD=${ES_PASSWORD}
    networks:
      - siemnet
    volumes:
      - ./conf/logstash/conf.d:/etc/logstash/conf.d
      - ./conf/logstash/conf-auth.d/80_siem.conf:/etc/logstash/conf.d/80_siem.conf
      - ./conf/logstash/conf-auth.d/99_output.conf:/etc/logstash/conf.d/99_output.conf
      - ./conf/logstash/index-template.d/es7:/etc/logstash/index-template.d
    depends_on:
      - elasticsearch

  kibana:
    container_name: kibana
    image: docker.elastic.co/kibana/kibana:7.11.0
    environment:
      - xpack.monitoring.ui.container.elasticsearch.enabled=false
      - ELASTICSEARCH_USERNAME=${ES_USERNAME}
      - ELASTICSEARCH_PASSWORD=${ES_PASSWORD}
    ports:
      - 5601:5601
    networks:
      - siemnet
    depends_on:
      - elasticsearch

  # use the dsiem service name for the frontend to avoid changing the logstash configuration
  dsiem:
    container_name: dsiem-frontend
    image: defenxor/dsiem:latest
    environment:
      - DSIEM_MODE=cluster-frontend
      - DSIEM_NODE=dsiem-frontend-0
      - DSIEM_MSQ=nats://dsiem-nats:4222
      - DSIEM_PORT=8080
      - DSIEM_DEBUG=true
      - DSIEM_WEB_ESURL=http://${ES_USERNAME}:${ES_PASSWORD}@localhost:9200
      - DSIEM_WEB_KBNURL=http://localhost:5601
    ports:
      - "8080:8080"
    networks:
      - siemnet

  dsiem-backend:
    container_name: dsiem-backend
    image: defenxor/dsiem:latest
    environment:
      - DSIEM_MODE=cluster-backend
      - DSIEM_NODE=dsiem-backend-0
      - DSIEM_DEBUG=true
      - DSIEM_FRONTEND=http://dsiem:8080
      - DSIEM_MSQ=nats://dsiem-nats:4222
    networks:
      - siemnet
    volumes:
      - dsiem-log:/dsiem/logs

  nats:
    container_name: dsiem-nats
    image: nats:1.3.0-linux
    networks:
      - siemnet

  filebeat:
    container_name: filebeat
    image: docker.elastic.co/beats/filebeat:7.11.0
    user: root
    networks:
      - siemnet
    volumes:
      - filebeat-data:/usr/share/filebeat/data
      - ./conf/filebeat/filebeat.yml:/usr/share/filebeat/filebeat.yml
      - dsiem-log:/var/log/dsiem
      - suricata-log:/var/log/suricata

  suricata:
    container_name: suricata
    image: defenxor/suricata:1710
    network_mode: "host"
    cap_add:
      - NET_ADMIN
      - SYS_NICE
      - NET_RAW
    command:
      [
        "/bin/bash",
        "-c",
        "chown -R suri /var/log/suricata && /usr/bin/suricata -v -i ${PROMISC_INTERFACE}"
      ]
    volumes:
      - suricata-log:/var/log/suricata

volumes:
  filebeat-data:
  es-data:
  dsiem-log:
  suricata-log:
networks:
  siemnet: