diff --git a/salt/elasticfleet/files/integrations/grid-nodes_general/soc-detections-logs.json b/salt/elasticfleet/files/integrations/grid-nodes_general/soc-detections-logs.json
new file mode 100644
index 0000000000..5649b481de
--- /dev/null
+++ b/salt/elasticfleet/files/integrations/grid-nodes_general/soc-detections-logs.json
@@ -0,0 +1,35 @@
+{
+  "policy_id": "so-grid-nodes_general",
+  "package": {
+    "name": "log",
+    "version": ""
+  },
+  "name": "soc-detections-logs",
+  "description": "Security Onion Console - Detections Logs",
+  "namespace": "so",
+  "inputs": {
+    "logs-logfile": {
+      "enabled": true,
+      "streams": {
+        "log.logs": {
+          "enabled": true,
+          "vars": {
+            "paths": [
+              "/opt/so/log/soc/detections_runtime-status_sigma.log",
+              "/opt/so/log/soc/detections_runtime-status_yara.log"
+            ],
+            "exclude_files": [],
+            "ignore_older": "72h",
+            "data_stream.dataset": "soc",
+            "tags": [
+              "so-soc"
+            ],
+            "processors": "- decode_json_fields:\n fields: [\"message\"]\n target: \"soc\"\n process_array: true\n max_depth: 2\n add_error_key: true \n- add_fields:\n target: event\n fields:\n category: host\n module: soc\n dataset_temp: detections\n- rename:\n fields:\n - from: \"soc.fields.sourceIp\"\n to: \"source.ip\"\n - from: \"soc.fields.status\"\n to: \"http.response.status_code\"\n - from: \"soc.fields.method\"\n to: \"http.request.method\"\n - from: \"soc.fields.path\"\n to: \"url.path\"\n - from: \"soc.message\"\n to: \"event.action\"\n - from: \"soc.level\"\n to: \"log.level\"\n ignore_missing: true",
+            "custom": "pipeline: common"
+          }
+        }
+      }
+    }
+  },
+  "force": true
+}
diff --git a/salt/soc/config.sls b/salt/soc/config.sls
index 3e756f977f..af34f5e7c7 100644
--- a/salt/soc/config.sls
+++ b/salt/soc/config.sls
@@ -80,6 +80,17 @@ socmotd:
     - mode: 600
     - template: jinja
 
+crondetectionsruntime:
+  cron.present:
+    - name: /usr/local/bin/so-detections-runtime-status cron
+    - identifier: detections-runtime-status
+    - user: socore
+    - minute: '*/10'
+    - hour: '*'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+
 socsigmafinalpipeline:
   file.managed:
     - name: /opt/so/conf/soc/sigma_final_pipeline.yaml
diff --git a/salt/soc/defaults.yaml b/salt/soc/defaults.yaml
index 13e2021c55..1c14e61cb6 100644
--- a/salt/soc/defaults.yaml
+++ b/salt/soc/defaults.yaml
@@ -1912,6 +1912,12 @@ soc:
       - name: Kismet - WiFi Devices
         description: WiFi devices seen by Kismet sensors
        query: 'event.module: kismet | groupby network.wireless.ssid | groupby device.manufacturer | groupby -pie device.manufacturer | groupby event.dataset'
+      - name: SOC Detections - Runtime Status
+        description: Runtime Status of Detections
+        query: 'event.dataset:soc.detections | groupby soc.detection_type soc.error_type | groupby soc.error_analysis | groupby soc.rule.name | groupby soc.error_message'
+
+
+
     job:
       alerts:
         advanced: false
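
Note on how the three changes above fit together: the Elastic Agent integration tails the two detections_runtime-status log files, its decode_json_fields processor parses each JSON line in the message field into the soc.* namespace, and the new defaults.yaml dashboard query then groups on soc.detection_type, soc.error_type, soc.error_analysis, soc.rule.name, and soc.error_message. The following is a minimal, hypothetical Python sketch, not part of this diff, that imitates only the decode step to make the resulting field names explicit; the sample record and the helper function are illustrative assumptions, not the actual Beats processor.

# Hypothetical illustration only, not part of this change. Approximates what the
# decode_json_fields processor (target: "soc") does to one line of
# /opt/so/log/soc/detections_runtime-status_sigma.log before the dashboard query runs.
import json

# Sample record shaped like the jq output emitted by so-detections-runtime-status below.
sample_line = json.dumps({
    "rule.name": "Example Sigma Rule",
    "error_type": "runtime_status",
    "error_message": "parsing_exception: unexpected token",
    "detection_type": "sigma",
    "error_analysis": "Syntax Error",
})

def decode_into_namespace(message: str, target: str = "soc") -> dict:
    """Parse the JSON message string and prefix every key with the target
    namespace, roughly mirroring decode_json_fields."""
    return {f"{target}.{key}": value for key, value in json.loads(message).items()}

# Produces keys such as soc.detection_type, soc.error_analysis, soc.rule.name and
# soc.error_message, i.e. the fields the "SOC Detections - Runtime Status" query groups by.
print(decode_into_namespace(sample_line))
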
diff --git a/salt/soc/tools/sbin/so-detections-runtime-status b/salt/soc/tools/sbin/so-detections-runtime-status
new file mode 100644
index 0000000000..ed3ee58008
--- /dev/null
+++ b/salt/soc/tools/sbin/so-detections-runtime-status
@@ -0,0 +1,33 @@
+#!/bin/bash
+#
+# Copyright Security Onion Solutions LLC and/or licensed to Security Onion Solutions LLC under one
+# or more contributor license agreements. Licensed under the Elastic License 2.0 as shown at
+# https://securityonion.net/license; you may not use this file except in compliance with the
+# Elastic License 2.0.
+
+# Set the default output destination to stdout
+output_dest="/dev/stdout"
+
+# If the "cron" flag is passed, change the output destination to the log file
+if [ "$1" = "cron" ]; then
+  output_dest="/opt/so/log/soc/detections_runtime-status_sigma.log"
+fi
+
+# Run the query and output based on the output_dest value
+/sbin/so-elasticsearch-query '*:elastalert_error*/_search' -d '{"query":{"range":{"@timestamp":{"gte":"now-11m","lte":"now"}}},"size": 50}' | \
+jq --compact-output '.hits.hits[] | {
+  _timestamp: ._source["@timestamp"],
+  "rule.name": ._source.data.rule,
+  error_type: "runtime_status",
+  error_message: ._source.message,
+  detection_type: "sigma",
+  event_module: "soc",
+  event_dataset: "soc.detections",
+  error_analysis: (
+    if ._source.message | contains("Unknown column [winlog.channel]") then "Target logsource never seen"
+    elif ._source.message | contains("parsing_exception") then "Syntax Error"
+    else "Unknown"
+    end
+  )
+  }' >> $output_dest
+
diff --git a/salt/strelka/compile_yara/compile_yara.py b/salt/strelka/compile_yara/compile_yara.py
index 6d88fbbde2..ece3c6a9e1 100644
--- a/salt/strelka/compile_yara/compile_yara.py
+++ b/salt/strelka/compile_yara/compile_yara.py
@@ -39,14 +39,14 @@ def compile_yara_rules(rules_dir):
             # Extract just the UUID from the rule file name
             rule_id = os.path.splitext(os.path.basename(rule_file))[0]
             log_entry = {
-                "event.module": "soc",
-                "event.dataset": "soc.detections",
+                "event_module": "soc",
+                "event_dataset": "soc.detections",
                 "log.level": "error",
-                "error.message": error_message,
-                "error.analysis": "syntax error",
-                "detection_type": "yara",
-                "rule.uuid": rule_id,
-                "error.type": "runtime_status"
+                "error_message": error_message,
+                "error_analysis": "Syntax Error",
+                "detection_type": "YARA",
+                "rule_uuid": rule_id,
+                "error_type": "runtime_status"
             }
             with open('/opt/sensoroni/logs/detections_runtime-status_yara.log', 'a') as log_file:
                 json.dump(log_entry, log_file)
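
Note on the classification logic: the bash script above derives error_analysis for Sigma elastalert errors with a jq conditional, while compile_yara.py now writes underscore-named fields (error_message, error_analysis, rule_uuid, and so on) so both runtime-status logs carry matching keys once decoded. Below is a minimal, hypothetical Python sketch, not part of this diff, of that classification and log-entry shape; the function names and the sample message are assumptions made for illustration.

# Hypothetical illustration only, not part of this change. Re-expresses the jq
# error_analysis chain from so-detections-runtime-status and the log-entry shape
# shared with compile_yara.py.
import json
from datetime import datetime, timezone

def classify_error(message: str) -> str:
    # Mirrors the jq if/elif/else chain in the bash script above.
    if "Unknown column [winlog.channel]" in message:
        return "Target logsource never seen"
    if "parsing_exception" in message:
        return "Syntax Error"
    return "Unknown"

def build_sigma_entry(rule_name: str, message: str) -> dict:
    # Same field names the new dashboard groups on once decode_json_fields
    # prefixes them with "soc.".
    return {
        "_timestamp": datetime.now(timezone.utc).isoformat(),
        "rule.name": rule_name,
        "error_type": "runtime_status",
        "error_message": message,
        "detection_type": "sigma",
        "event_module": "soc",
        "event_dataset": "soc.detections",
        "error_analysis": classify_error(message),
    }

if __name__ == "__main__":
    # Example input only; the real script pulls these from the elastalert_error* indices.
    print(json.dumps(build_sigma_entry("Example Sigma Rule", "parsing_exception: token error")))
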