diff --git a/salt/elasticfleet/config.sls b/salt/elasticfleet/config.sls
index f8566ed945..43bfb8af99 100644
--- a/salt/elasticfleet/config.sls
+++ b/salt/elasticfleet/config.sls
@@ -73,6 +73,56 @@ eapackageupgrade:
     - template: jinja
 
 {% if GLOBALS.role != "so-fleet" %}
+
+soresourcesrepoconfig:
+  git.config_set:
+    - name: safe.directory
+    - value: /nsm/securityonion-resources
+    - global: True
+
+{% if not GLOBALS.airgap %}
+soresourcesrepoclone:
+  git.latest:
+    - name: https://github.com/Security-Onion-Solutions/securityonion-resources.git
+    - target: /nsm/securityonion-resources
+    - rev: 'dev/defend_filters'
+    - depth: 1
+{% endif %}
+
+elasticdefendconfdir:
+  file.directory:
+    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets
+    - user: 947
+    - group: 939
+    - makedirs: True
+
+elasticdefenddisabled:
+  file.managed:
+    - name: /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml
+    - source: salt://elasticfleet/files/soc/elastic-defend-disabled-filters.yaml
+    - user: 947
+    - group: 939
+    - mode: 600
+
+elasticdefendcustom:
+  file.managed:
+    - name: /opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters-raw
+    - source: salt://elasticfleet/files/soc/elastic-defend-custom-filters.yaml
+    - user: 947
+    - group: 939
+    - mode: 600
+
+cronelasticdefendfilters:
+  cron.present:
+    - name: python3 /sbin/so-elastic-defend-manage-filters.py -c /opt/so/conf/elasticsearch/curl.config -d /opt/so/conf/elastic-fleet/defend-exclusions/disabled-filters.yaml -i /nsm/securityonion-resources/event_filters/ -i /opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters/ &>> /opt/so/log/elasticfleet/elastic-defend-manage-filters.log
+    - identifier: elastic-defend-filters
+    - user: root
+    - minute: '0'
+    - hour: '3'
+    - daymonth: '*'
+    - month: '*'
+    - dayweek: '*'
+
 eaintegrationsdir:
   file.directory:
     - name: /opt/so/conf/elastic-fleet/integrations
diff --git a/salt/elasticfleet/defaults.yaml b/salt/elasticfleet/defaults.yaml
index ad2a7cc919..c3ca0f8283 100644
--- a/salt/elasticfleet/defaults.yaml
+++ b/salt/elasticfleet/defaults.yaml
@@ -8,6 +8,8 @@ elasticfleet:
       endpoints_enrollment: ''
       es_token: ''
       grid_enrollment: ''
+    defend_filters:
+      enable_auto_configuration: False
   logging:
     zeek:
       excluded:
diff --git a/salt/elasticfleet/files/soc/elastic-defend-custom-filters.yaml b/salt/elasticfleet/files/soc/elastic-defend-custom-filters.yaml
new file mode 100644
index 0000000000..ed8121a712
--- /dev/null
+++ b/salt/elasticfleet/files/soc/elastic-defend-custom-filters.yaml
@@ -0,0 +1,27 @@
+title: 'Template 1'
+id: 'This needs to be a UUIDv4 id - https://www.uuidgenerator.net/version4'
+description: 'Short description detailing what this rule is filtering and why.'
+references: 'Relevant urls, etc'
+author: '@SecurityOnion'
+date: 'MM/DD/YY'
+event_type: 'dns_query'
+filter_type: 'exclude'
+filter:
+  selection_1:
+    TargetField: 'QueryName'
+    Condition: 'end with'
+    Pattern: '.thawte.com'
+---
+title: 'Template 2'
+id: 'This needs to be a UUIDv4 id - https://www.uuidgenerator.net/version4'
+description: 'Short description detailing what this rule is filtering and why.'
+references: 'Relevant urls, etc'
+author: '@SecurityOnion'
+date: 'MM/DD/YY'
+event_type: 'process_creation'
+filter_type: 'exclude'
+filter:
+  selection_1:
+    TargetField: 'ParentImage'
+    Condition: 'is'
+    Pattern: 'C:\Windows\Microsoft.NET\Framework\v4.0.30319\ngentask.exe'
\ No newline at end of file
diff --git a/salt/elasticfleet/files/soc/elastic-defend-disabled-filters.yaml b/salt/elasticfleet/files/soc/elastic-defend-disabled-filters.yaml
new file mode 100644
index 0000000000..52a646385b
--- /dev/null
+++ b/salt/elasticfleet/files/soc/elastic-defend-disabled-filters.yaml
@@ -0,0 +1,3 @@
+'9EDAA51C-BB12-49D9-8748-2B61371F2E7D':
+  Date: '10/10/2024'
+  Notes: 'Example Disabled Filter - Leave this entry here, just copy and paste as needed.'
\ No newline at end of file
diff --git a/salt/elasticfleet/soc_elasticfleet.yaml b/salt/elasticfleet/soc_elasticfleet.yaml
index 26efce9411..0b32628ea7 100644
--- a/salt/elasticfleet/soc_elasticfleet.yaml
+++ b/salt/elasticfleet/soc_elasticfleet.yaml
@@ -9,6 +9,24 @@ elasticfleet:
     global: True
     forcedType: bool
     helpLink: elastic-fleet.html
+  files:
+    soc:
+      elastic-defend-disabled-filters__yaml:
+        title: Disabled Elastic Defend filters
+        description: Enter the ID of each Elastic Defend filter that should be disabled.
+        syntax: yaml
+        file: True
+        global: True
+        helpLink: elastic-fleet.html
+        advanced: True
+      elastic-defend-custom-filters__yaml:
+        title: Custom Elastic Defend filters
+        description: Enter custom filters, separated by ---.
+        syntax: yaml
+        file: True
+        global: True
+        helpLink: elastic-fleet.html
+        advanced: True
   logging:
     zeek:
       excluded:
@@ -16,6 +34,12 @@ elasticfleet:
         forcedType: "[]string"
         helpLink: zeek.html
   config:
+    defend_filters:
+      enable_auto_configuration:
+        description: Enable auto-configuration and management of the Elastic Defend exclusion filters.
+        global: True
+        helpLink: elastic-fleet.html
+        advanced: True
     server:
       custom_fqdn:
         description: Custom FQDN for Agents to connect to. One per line.
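Custom filters are multi-document YAML separated by ---, as the option above describes. A minimal sketch (illustrative only; this script and its assumptions are not part of the change) of validating each document's id and event_type locally before the nightly cron job picks the file up, using the same event_type names as the DATASET_MAPPINGS in so-elastic-defend-manage-filters.py:

# check_custom_filters.py - illustrative helper, not shipped with this change
import uuid
import yaml

CUSTOM_FILTERS = "/opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters-raw"
KNOWN_EVENT_TYPES = {"process_creation", "network_connection", "file_create",
                     "file_delete", "registry_event", "dns_query"}

with open(CUSTOM_FILTERS) as f:
    for doc in yaml.safe_load_all(f):
        # Skip empty documents and the shipped templates
        if not doc or str(doc.get("title", "")).startswith("Template"):
            continue
        try:
            uuid.UUID(doc.get("id", ""), version=4)
        except ValueError:
            print(f"Bad id (not a UUIDv4): {doc.get('id')}")
        if doc.get("event_type") not in KNOWN_EVENT_TYPES:
            print(f"Unknown event_type: {doc.get('event_type')}")
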
diff --git a/salt/elasticfleet/tools/sbin/so-elastic-defend-manage-filters.py b/salt/elasticfleet/tools/sbin/so-elastic-defend-manage-filters.py
new file mode 100644
index 0000000000..f7ce39d1db
--- /dev/null
+++ b/salt/elasticfleet/tools/sbin/so-elastic-defend-manage-filters.py
@@ -0,0 +1,251 @@
+from datetime import datetime
+import sys
+import getopt
+from so_elastic_defend_filters_helper import *
+import logging
+
+logging.basicConfig(level=logging.INFO, format='%(message)s')
+
+# Define mappings for Target Field, Event Type, Conditions
+TARGET_FIELD_MAPPINGS = {
+    "Image": "process.executable",
+    "ParentImage": "process.parent.executable",
+    "CommandLine": "process.command_line",
+    "ParentCommandLine": "process.parent.command_line",
+    "DestinationHostname": "destination.domain",
+    "QueryName": "dns.question.name",
+    "DestinationIp": "destination.ip",
+    "TargetObject": "registry.path",
+    "TargetFilename": "file.path"
+}
+
+DATASET_MAPPINGS = {
+    "process_creation": "endpoint.events.process",
+    "network_connection": "endpoint.events.network",
+    "file_create": "endpoint.events.file",
+    "file_delete": "endpoint.events.file",
+    "registry_event": "endpoint.events.registry",
+    "dns_query": "endpoint.events.network"
+}
+
+CONDITION_MAPPINGS = {
+    "is": ("included", "match"),
+    "end with": ("included", "wildcard"),
+    "begin with": ("included", "wildcard"),
+    "contains": ("included", "wildcard")
+}
+
+# Extract entries for a rule
+def extract_entries(data, event_type):
+    entries = []
+    filter_data = data.get('filter', {})
+    for value in filter_data.values():
+        target_field = TARGET_FIELD_MAPPINGS.get(value.get('TargetField', ''))
+        condition = value.get('Condition', '')
+        pattern = value.get('Pattern', '')
+
+        if target_field is None:
+            logging.error(f"No field mapping found for TargetField: {value.get('TargetField', '')}")
+            continue
+
+        if condition not in CONDITION_MAPPINGS:
+            logging.error(f"Invalid condition: {condition}")
+            continue
+
+        # Modify the pattern based on the condition
+        pattern = modify_pattern(condition, pattern)
+
+        operator, match_type = CONDITION_MAPPINGS[condition]
+
+        entries.append({
+            "field": target_field,
+            "operator": operator,
+            "type": match_type,
+            "value": pattern
+        })
+
+    # Add the event.dataset entry from DATASET_MAPPINGS
+    dataset_value = DATASET_MAPPINGS.get(event_type, '')
+    if dataset_value:
+        entries.append({
+            "field": "event.dataset",
+            "operator": "included",
+            "type": "match",
+            "value": dataset_value
+        })
+    else:
+        logging.error(f"No dataset mapping found for event_type: {event_type}")
+
+    return entries
+
+# Build the JSON
+def build_json_entry(entries, guid, event_type, context):
+    return {
+        "comments": [],
+        "entries": entries,
+        "item_id": guid,
+        "name": f"SO - {event_type} - {guid}",
+        "description": f"{context}\n\n <<- Note: This filter is managed by Security Onion. ->>",
+        "namespace_type": "agnostic",
+        "tags": ["policy:all"],
+        "type": "simple",
+        "os_types": ["windows"]
+    }
+
+# Check to see if the rule is disabled
+# If it is, make sure it is not active
+def disable_check(guid, disabled_rules, username, password):
+    if guid in disabled_rules:
+        logging.info(f"Rule {guid} is in the disabled rules list, confirming that it is actually disabled...")
+        existing_rule = api_request("GET", guid, username, password)
+
+        if existing_rule:
+            if api_request("DELETE", guid, username, password):
+                logging.info(f"Successfully deleted rule {guid}")
+                return True, "deleted"
+            else:
+                logging.error(f"Error deleting rule {guid}.")
+                return True, "Error deleting"
+        return True, "NOP"
+    return False, None
+
+def modify_pattern(condition, pattern):
+    """
+    Modify the pattern based on the condition.
+    - 'end with': Add '*' to the beginning of the pattern.
+    - 'begin with': Add '*' to the end of the pattern.
+    - 'contains': Add '*' to both the beginning and end of the pattern.
+    """
+    if isinstance(pattern, list):
+        # Apply modification to each pattern in the list if it's a list of patterns
+        return [modify_pattern(condition, p) for p in pattern]
+
+    if condition == "end with":
+        return f"*{pattern}"
+    elif condition == "begin with":
+        return f"{pattern}*"
+    elif condition == "contains":
+        return f"*{pattern}*"
+    return pattern
+
+
+def process_rule_update_or_create(guid, json_entry, username, password):
+    existing_rule = api_request("GET", guid, username, password)
+
+    if existing_rule:
+        existing_rule_data = extract_relevant_fields(existing_rule)
+        new_rule_data = extract_relevant_fields(json_entry)
+        if generate_hash(existing_rule_data) != generate_hash(new_rule_data):
+            logging.info(f"Updating rule {guid}")
+            json_entry.pop("list_id", None)
+            api_request("PUT", guid, username, password, json_data=json_entry)
+            return "updated"
+        logging.info(f"Rule {guid} is up to date.")
+        return "no_change"
+    else:
+        logging.info(f"Creating new rule {guid}")
+        json_entry["list_id"] = "endpoint_event_filters"
+        api_request("POST", guid, username, password, json_data=json_entry)
+        return "new"
+
+# Main function for processing rules
+def process_rules(yaml_files, disabled_rules, username, password):
+    stats = {"rule_count": 0, "new": 0, "updated": 0, "no_change": 0, "disabled": 0, "deleted": 0}
+    for data in yaml_files:
+        logging.info(f"Processing rule: {data.get('id', '')}")
+        event_type = data.get('event_type', '')
+        guid = data.get('id', '')
+        context = data.get('description', '')
+        stats["rule_count"] += 1
+
+        rule_deleted, state = disable_check(guid, disabled_rules, username, password)
+        if rule_deleted:
+            stats["disabled"] += 1
+            if state == "deleted":
+                stats["deleted"] += 1
+            continue
+
+        # Extract entries and build JSON
+        entries = extract_entries(data, event_type)
+        json_entry = build_json_entry(entries, guid, event_type, context)
+
+        # Process rule creation or update
+        status = process_rule_update_or_create(guid, json_entry, username, password)
+
+        stats[status] += 1
+    return stats
+
+def parse_args(argv):
+    try:
+        opts, args = getopt.getopt(argv, "i:d:c:f:", ["input=", "disabled=", "credentials=", "flags_file="])
+    except getopt.GetoptError:
+        print("Usage: python so-elastic-defend-manage-filters.py -c <credentials_file> -d <disabled_file> -i <input_directory> [-f <flags_file>]")
+        sys.exit(2)
+    return opts
+
+def load_flags(file_path):
+    with open(file_path, 'r') as flags_file:
+        return flags_file.read().splitlines()
+
+def validate_inputs(credentials_file, disabled_file, yaml_directories):
+    if not credentials_file or not disabled_file or not yaml_directories:
+        print("Usage: python so-elastic-defend-manage-filters.py -c <credentials_file> -d <disabled_file> -i <input_directory> [-f <flags_file>]")
+        sys.exit(2)
+
+def main(argv):
+    credentials_file = ""
+    disabled_file = ""
+    yaml_directories = []
+
+    opts = parse_args(argv)
+
+    for opt, arg in opts:
+        if opt in ("-c", "--credentials"):
+            credentials_file = arg
+        elif opt in ("-d", "--disabled"):
+            disabled_file = arg
+        elif opt in ("-i", "--input"):
+            yaml_directories.append(arg)
+        elif opt in ("-f", "--flags_file"):
+            flags = load_flags(arg)
+            # Re-run with the options from the flags file, dropping -f so the recursion terminates
+            base_args = []
+            for o, a in opts:
+                if o not in ("-f", "--flags_file"):
+                    base_args.extend([o, a])
+            return main(base_args + flags)
+
+    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    logging.info(f"\n{timestamp}")
+
+    validate_inputs(credentials_file, disabled_file, yaml_directories)
+
+    credentials = load_credentials(credentials_file)
+    if not credentials:
+        raise Exception("Failed to load credentials")
+
+    username, password = extract_auth_details(credentials)
+    if not username or not password:
+        raise Exception("Invalid credentials format")
+
+    custom_rules_input = '/opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters-raw'
+    custom_rules_output = '/opt/so/conf/elastic-fleet/defend-exclusions/rulesets/custom-filters'
+    prepare_custom_rules(custom_rules_input, custom_rules_output)
+    disabled_rules = load_disabled(disabled_file)
+
+    total_stats = {"rule_count": 0, "new": 0, "updated": 0, "no_change": 0, "disabled": 0, "deleted": 0}
+
+    for yaml_dir in yaml_directories:
+        yaml_files = load_yaml_files(yaml_dir)
+        stats = process_rules(yaml_files, disabled_rules, username, password)
+
+        for key in total_stats:
+            total_stats[key] += stats[key]
+
+    logging.info(f"\nProcessing Summary")
+    logging.info(f" - Total processed rules: {total_stats['rule_count']}")
+    logging.info(f" - New rules: {total_stats['new']}")
+    logging.info(f" - Updated rules: {total_stats['updated']}")
+    logging.info(f" - Deleted rules: {total_stats['deleted']}")
+    logging.info(f" - Rules with no changes: {total_stats['no_change']}")
+    logging.info(f"Rule Status Summary")
+    logging.info(f" - Active rules: {total_stats['rule_count'] - total_stats['disabled']}")
+    logging.info(f" - Disabled rules: {total_stats['disabled']}")
+    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
+    logging.info(f"Execution completed at: {timestamp}")
+
+
+if __name__ == "__main__":
+    main(sys.argv[1:])
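For reference, and based solely on the mappings defined in the script above, Template 1's dns_query selection ('QueryName' / 'end with' / '.thawte.com') should translate into the following exception-item entries; the variable name below is illustrative only:

# Expected output of extract_entries() for Template 1 (illustrative)
expected_entries = [
    {"field": "dns.question.name", "operator": "included", "type": "wildcard", "value": "*.thawte.com"},
    {"field": "event.dataset", "operator": "included", "type": "match", "value": "endpoint.events.network"}
]
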
diff --git a/salt/elasticfleet/tools/sbin/so_elastic_defend_filters_helper.py b/salt/elasticfleet/tools/sbin/so_elastic_defend_filters_helper.py
new file mode 100644
index 0000000000..90031a2375
--- /dev/null
+++ b/salt/elasticfleet/tools/sbin/so_elastic_defend_filters_helper.py
@@ -0,0 +1,128 @@
+
+import hashlib
+import os
+import json
+import yaml
+import requests
+from requests.auth import HTTPBasicAuth
+import shutil
+
+# Extract the 'entries' and 'description' fields used for change detection
+def extract_relevant_fields(filter):
+    return {
+        'entries': filter.get('entries', []),
+        'description': filter.get('description', '')
+    }
+
+# Sort for consistency, so that a hash can be generated
+def sorted_data(value):
+    if isinstance(value, dict):
+        # Recursively sort the dictionary by key
+        return {k: sorted_data(v) for k, v in sorted(value.items())}
+    elif isinstance(value, list):
+        # Sort lists; for dictionaries, sort by a specific key
+        return sorted(value, key=lambda x: tuple(sorted(x.items())) if isinstance(x, dict) else x)
+    return value
+
+# Generate a hash based on sorted relevant fields
+def generate_hash(data):
+    sorted_data_string = json.dumps(sorted_data(data), sort_keys=True)
+    return hashlib.sha256(sorted_data_string.encode('utf-8')).hexdigest()
+
+# Load Elasticsearch credentials from the config file
+def load_credentials(config_path):
+    with open(config_path, 'r') as file:
+        for line in file:
+            if line.startswith("user"):
+                credentials = line.split('=', 1)[1].strip().strip('"')
+                return credentials
+    return None
+
+# Extract username and password from credentials
+def extract_auth_details(credentials):
+    if ':' in credentials:
+        return credentials.split(':', 1)
+    return None, None
+
+# Generalized API request function
+def api_request(method, guid, username, password, json_data=None):
+    headers = {
+        'kbn-xsrf': 'true',
+        'Content-Type': 'application/json'
+    }
+    auth = HTTPBasicAuth(username, password)
+
+    if method == "POST":
+        url = "http://localhost:5601/api/exception_lists/items?namespace_type=agnostic"
+    else:
+        url = f"http://localhost:5601/api/exception_lists/items?item_id={guid}&namespace_type=agnostic"
+
+    response = requests.request(method, url, headers=headers, auth=auth, json=json_data)
+
+    if response.status_code in [200, 201]:
+        return response.json() if response.content else True
+    elif response.status_code == 404 and method == "GET":
+        return None
+    else:
+        print(f"Error with {method} request: {response.status_code} - {response.text}")
+        return False
+
+
+# Load YAML data for GUIDs to skip
+def load_disabled(disabled_file_path):
+    if os.path.exists(disabled_file_path):
+        with open(disabled_file_path, 'r') as file:
+            return yaml.safe_load(file) or {}
+    return {}
+
+def load_yaml_files(*dirs):
+    yaml_files = []
+
+    for dir_path in dirs:
+        if os.path.isdir(dir_path):
+            # Recurse through the directory and subdirectories
+            for root, _, files in os.walk(dir_path):
+                for file_name in files:
+                    if file_name.endswith(".yaml"):
+                        full_path = os.path.join(root, file_name)
+                        with open(full_path, 'r') as f:
+                            try:
+                                yaml_content = yaml.safe_load(f)
+                                yaml_files.append(yaml_content)
+                            except yaml.YAMLError as e:
+                                print(f"Error loading {full_path}: {e}")
+        else:
+            print(f"Invalid directory: {dir_path}")
+
+    return yaml_files
+
+def prepare_custom_rules(input_file, output_dir):
+    # Clear the output directory first
+    if os.path.exists(output_dir):
+        shutil.rmtree(output_dir)
+    os.makedirs(output_dir, exist_ok=True)
+
+    try:
+        # Load the YAML file while the handle is open (safe_load_all is lazy)
+        with open(input_file, 'r') as f:
+            docs = yaml.safe_load_all(f)
+
+            for doc in docs:
+                # Skip empty documents (for example, a trailing '---')
+                if not doc:
+                    continue
+                if 'id' not in doc:
+                    print(f"Skipping rule, no 'id' found: {doc}")
+                    continue
+                if doc.get('title') in ["Template 1", "Template 2"]:
+                    print(f"Skipping template rule with title: {doc['title']}")
+                    continue
+                # Create a filename using the 'id' field
+                file_name = os.path.join(output_dir, f"{doc['id']}.yaml")
+
+                # Write the individual YAML file
+                with open(file_name, 'w') as output_file:
+                    yaml.dump(doc, output_file, default_flow_style=False)
+                print(f"Created file: {file_name}")
+
+    except yaml.YAMLError as e:
+        print(f"Error parsing YAML: {e}")
+    except Exception as e:
+        print(f"Error processing file: {e}")
\ No newline at end of file
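The update check hashes only the 'entries' and 'description' fields after recursively sorting them, so key order and unrelated metadata returned by Kibana do not trigger unnecessary PUT requests. A minimal sketch of that behavior with illustrative data:

from so_elastic_defend_filters_helper import extract_relevant_fields, generate_hash

# Same entries and description, different key order plus extra metadata
existing = {"entries": [{"field": "process.executable", "value": "*foo*"}],
            "description": "x", "tags": ["policy:all"], "item_id": "abc"}
candidate = {"description": "x",
             "entries": [{"value": "*foo*", "field": "process.executable"}]}
assert generate_hash(extract_relevant_fields(existing)) == generate_hash(extract_relevant_fields(candidate))
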
"localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["enterpriseSearch"]} ' >> /opt/so/log/kibana/misc.log {% else %} -curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X PUT "localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["ml","enterpriseSearch","siem","logs","infrastructure","apm","uptime","monitoring","stackAlerts","actions","securitySolutionCases"]} ' >> /opt/so/log/kibana/misc.log +curl -K /opt/so/conf/elasticsearch/curl.config -b "sid=$SESSIONCOOKIE" -L -X PUT "localhost:5601/api/spaces/space/default" -H 'kbn-xsrf: true' -H 'Content-Type: application/json' -d' {"id":"default","name":"Default","disabledFeatures":["ml","enterpriseSearch","logs","infrastructure","apm","uptime","monitoring","stackAlerts","actions","securitySolutionCases"]} ' >> /opt/so/log/kibana/misc.log {% endif %} echo