#########################################
# From Dsiem plugins                    #
#########################################

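# Generate a unique event_id for every normalized event produced by the dsiem Logstash
# plugins; it is used below as the Elasticsearch document_id and is also part of the
# event payload posted to dsiem.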
filter {
  if [@metadata][siem_data_type] == "normalizedEvent" {
    uuid {
      target => "event_id"
      overwrite => true
    }
  }
}

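# Normalized events go to two destinations: to dsiem over HTTP for correlation, and to
# Elasticsearch for storage and search.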
output {
  if [@metadata][siem_data_type] == "normalizedEvent" {
    # to dsiem
    http {
      format => "json"
      http_method => "post"
      url => "http://dsiem:8080/events"
    }
    # to elasticsearch
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "siem_events-%{+YYYY.MM.dd}"
      document_id => "%{[event_id]}"
      action => "index"
      template => "/etc/logstash/index-template.d/siem_events-template.json"
      template_name => "siem_events"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }
  }
}

#########################################
# From Dsiem's Filebeat                 #
#########################################

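# alarm_events link an alarm (alarm_id) to the normalized events (event_id) that triggered
# it. The data type is copied into @metadata so it can still be used for routing in the
# output section after the document has been pruned down to the whitelisted fields.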
filter {
  if [siem_data_type] == "alarm_events" {
    mutate {
      add_field => {
        "[@metadata][siem_data_type]" => "alarm_events"
      }
    }
    prune {
      whitelist_names => [ "@metadata", "@timestamp", "alarm_id", "event_id", "stage" ]
    }
  }

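  # alarms carry the full alarm document. Convert dsiem's UNIX-epoch created_time and
  # update_time fields into proper date fields, and stash the alarm ID and data type in
  # @metadata for use in the output section.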
  if [siem_data_type] == "alarms" {
    date {
      match => [ "created_time", "UNIX" ]
      target => "timestamp"
    }
    date {
      match => [ "update_time", "UNIX" ]
      target => "updated_time"
    }
    mutate {
      add_field => {
        "[@metadata][alarm_id]" => "%{[alarm_id]}"
        "[@metadata][siem_data_type]" => "alarms"
      }
    }

    # Set target_index to the actual index (perm_index) when the alarm ID already exists.
    # The lookup is done against the siem_alarms_id_lookup alias, which is assigned to every
    # new index by default. The alias can then be managed separately to cover, for example,
    # only the last 3 indices.
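    # Illustrative only (not part of this pipeline): the alias could be repointed to the most
    # recent indices with something along these lines, adjusting the index patterns as needed:
    #   POST /_aliases
    #   { "actions": [
    #       { "remove": { "index": "siem_alarms-*",         "alias": "siem_alarms_id_lookup" } },
    #       { "add":    { "index": "siem_alarms-2024.01.*", "alias": "siem_alarms_id_lookup" } }
    #   ] }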

    elasticsearch {
      hosts => ["elasticsearch:9200"]
      index => "siem_alarms_id_lookup"
      query => "_id:%{[alarm_id]}"
      fields => {
        "perm_index" => "[@metadata][target_index]"
      }
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }

    # if the lookup above failed, or found no match because this is a new alarm ID,
    # fall back to today's index
    if ![@metadata][target_index] {
      mutate {
        add_field => {
          "[@metadata][target_index]" => "siem_alarms-%{+YYYY.MM.dd}"
        }
      }
    }

    # the elasticsearch filter plugin only searches within _source, so the extra perm_index
    # field below is necessary for the ID lookup above to work on future updates
    mutate {
      add_field => {
        "perm_index" => "%{[@metadata][target_index]}"
      }
    }
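    # whitelist_names entries are regular expressions: "tag$" matches field names ending in
    # "tag", and "^perm_index$" keeps only the exact perm_index field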
    prune {
      whitelist_names => [ "timestamp", "@metadata", "title", "status", "kingdom", "category",
        "updated_time", "risk", "risk_class", "tag$", "src_ips", "dst_ips", "intel_hits", "vulnerabilities",
        "networks", "rules", "custom_data", "^perm_index$" ]
    }

    # debugging only:
    # mutate { add_field => { "alarm_id" => "%{[@metadata][alarm_id]}" }}
    # ruby { code => 'logger.info("Dsiem alarm processing: ready to output ID ", "value" => event.get("[@metadata][alarm_id]"))' }
  }
}

output {
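  # alarm_events are append-only associations between an alarm and its triggering events,
  # so they are written to a daily index with auto-generated document IDs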
  if [@metadata][siem_data_type] == "alarm_events" {
    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "siem_alarm_events-%{+YYYY.MM.dd}"
      template => "/etc/logstash/index-template.d/siem_alarm_events-template.json"
      template_name => "siem_alarm_events"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
    }
  }

  # This output uses the update action with doc_as_upsert to allow partial updates
  if [@metadata][siem_data_type] == "alarms" {

    # debugging only:
    # elasticsearch { hosts => "elasticsearch:9200" index => "siem_alarms_debug" }

    elasticsearch {
      hosts => "elasticsearch:9200"
      index => "%{[@metadata][target_index]}"
      document_id => "%{[@metadata][alarm_id]}"
      template => "/etc/logstash/index-template.d/siem_alarms-template.json"
      template_name => "siem_alarms"
      template_overwrite => true
      user => "${ELASTICSEARCH_USERNAME}"
      password => "${ELASTICSEARCH_PASSWORD}"
      action => "update"
      # use doc_as_upsert and a script so that:
      # - an incoming doc is automatically indexed when its document_id doesn't exist yet
      # - for existing docs, we can selectively discard out-of-order updates and incoming
      #   status/tag values, without having to use external versioning
      doc_as_upsert => true
      script_lang => "painless"
      script_type => "inline"
      # a lower risk value on an incoming update means it arrived out of order;
      # the same goes for updated_time, which is only checked when the incoming update
      # doesn't carry a higher risk
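      # the Logstash event is exposed to the script as params.event (the elasticsearch
      # output's default script_var_name)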
      script => '
        int incoming_risk = params.event.get("risk");
        int existing_risk = ctx._source.risk;

        if (incoming_risk < existing_risk) {
          ctx.op = "none";
          return;
        } else if (incoming_risk == existing_risk) {
          ZonedDateTime old_tm = ZonedDateTime.parse(ctx._source.updated_time);
          ZonedDateTime new_tm = ZonedDateTime.parse(params.event.get("updated_time"));
          if (new_tm.isBefore(old_tm)) {
            ctx.op = "none";
            return;
          }
        }
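        // update accepted: overwrite the fields that dsiem may change during the alarm lifetime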
        ctx._source.timestamp = params.event.get("timestamp");
        ctx._source.updated_time = params.event.get("updated_time");
        ctx._source.risk = incoming_risk;
        ctx._source.risk_class = params.event.get("risk_class");
        ctx._source.src_ips = params.event.get("src_ips");
        ctx._source.dst_ips = params.event.get("dst_ips");
        ctx._source.rules = params.event.get("rules");
        ctx._source.networks = params.event.get("networks");

        if (params.event.get("intel_hits") != null) {
          ctx._source.intel_hits = params.event.get("intel_hits");
        }

        if (params.event.get("vulnerabilities") != null) {
          ctx._source.vulnerabilities = params.event.get("vulnerabilities");
        }

        if (params.event.get("custom_data") != null) {
          ctx._source.custom_data = params.event.get("custom_data");
        }
      '
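      # retry the scripted update if concurrent writes to the same alarm document cause a
      # version conflict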
      retry_on_conflict => 5
    }
  }
}