Skip to content

Commit e0599cb

Browse files
Feature/unit tests/infra 30038/cim tests (#277)
* Unit tests dir structure and GA workflow * commenting out test matrix worklow * eventtype_parser unit tests (#251) * eventtype_parser unit tests * eventtype_parser no config file update * review remarks #1 * Feature/unit tests/tags parser (#257) * eventtype_parser unit tests * eventtype_parser no config file update * review remarks #1 * conftest parametrization * wrapped pytest arguments * test_tags_parser * build_parsed_output * fixture scope * typo change * review remarks #1 * review remarks #2 * Feature/unit tests/infra 30036/props parser (#259) unit tests for props_parser * Feature/unit tests/infra 30109/fields (#260) * Tests for fields and addon_parser init * Feature/unit tests/infra 30097/transforms parser (#261) * test_transforms_parser * more tests for transforms_parser * [CR] fixtures at the top of file * Feature/unit tests/infra 30039/event ingestors (#265) * get event ingestor * test_events_can_be_ingested * reocrdtype added for unit tests * test_requirement_tests_can_be_run * test_requirement_tests_can_be_run #2 * wrongly added file deletion * test_hec_raw_ingestor * requests-mock typo fix * test_hec_raw_ingestor * test_requirement_tests_can_be_run assert has calls fix * hec_event_ingestor * HECMetricEventIngestor * import fix * test_sc4s_event_ingestor * test_sc4s_event_ingest additional asserts * test_event_ingestors/test_requirement_event_ingestor * [CR] assert exception string * Feature/unit tests/infra 30041/sample generation (#266) * First part of unit tests for sample_generation * Changing location of test files (#267) * reload module change * test_data_set * test_data_model * test_json_schema.py * test_data_model_handler * test_field_test_adapter * test_field_test_helper * more tests for test_field_test_helper * test_test_generator * conftest * more tests for test_generator * more tests for test_generator * poetry lock conflicts resolved * [CR] review remarks Co-authored-by: uoboda-splunk <[email protected]> Co-authored-by: uoboda-splunk 
<[email protected]>
1 parent 6298590 commit e0599cb

File tree

11 files changed

+1180
-3
lines changed

11 files changed

+1180
-3
lines changed

poetry.lock

Lines changed: 3 additions & 3 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

tests/unit/pytest.ini

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,3 +3,4 @@ addopts = -v --tb=long --log-level=INFO
33
# --force-flaky --max-runs=3 --min-passes=1
44
filterwarnings =
55
ignore::DeprecationWarning
6+

tests/unit/tests_standard_lib/test_cim_tests/__init__.py

Whitespace-only changes.
Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,16 @@
1+
import pytest
2+
from unittest.mock import MagicMock, mock_open
3+
4+
5+
@pytest.fixture()
def open_mock(monkeypatch):
    """Replace ``builtins.open`` with a ``mock_open`` and return the mock."""
    mocked_open = mock_open()
    monkeypatch.setattr("builtins.open", mocked_open)
    return mocked_open
10+
11+
12+
@pytest.fixture()
def json_load_mock(monkeypatch):
    """Replace ``json.load`` with a MagicMock and return it for assertions."""
    mocked_load = MagicMock()
    monkeypatch.setattr("json.load", mocked_load)
    return mocked_load
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
import pytest
2+
from unittest.mock import patch, MagicMock, call
3+
from collections import namedtuple
4+
from pytest_splunk_addon.standard_lib.cim_tests.data_model import DataModel
5+
6+
7+
@pytest.fixture()
def data_set_mock(monkeypatch):
    """Patch the DataSet class used by DataModel with a self-returning mock."""
    mock = MagicMock()
    # Calling the patched class returns the mock itself, so class-level and
    # instance-level attribute access both land on the same object.
    mock.return_value = mock
    mock.load_dataset.return_value = ["dataset1", "dataset2"]
    monkeypatch.setattr(
        "pytest_splunk_addon.standard_lib.cim_tests.data_model.DataSet", mock
    )
    return mock
16+
17+
18+
def test_data_model_instantiation(data_set_mock):
    """DataModel picks up its name and root datasets from the schema dict."""
    model = DataModel({"model_name": "test_model_name", "objects": []})
    assert model.name == "test_model_name"
    assert model.root_data_set == ["dataset1", "dataset2"]
22+
23+
24+
def test_data_model_string(data_set_mock):
    """str() of a DataModel is the model's name."""
    model = DataModel({"model_name": "test_model_name", "objects": []})
    assert str(model) == "test_model_name"
    assert model.name == "test_model_name"
27+
28+
29+
def test_get_mapped_datasets_calls_internal_function():
    """get_mapped_datasets delegates to _get_mapped_datasets with the addon
    tags and the root datasets, yielding whatever the helper produces."""
    internal_mock = MagicMock()
    internal_mock.side_effect = lambda tags, datasets: iter(range(3))
    with patch.object(DataModel, "__init__", return_value=None):
        with patch.object(DataModel, "_get_mapped_datasets", internal_mock):
            model = DataModel({})
            model.root_data_set = ["root_data_set"]
            assert list(model.get_mapped_datasets(["addons_tags"])) == [0, 1, 2]
            internal_mock.assert_has_calls([call(["addons_tags"], ["root_data_set"])])
39+
40+
41+
def test__get_mapped_datasets():
    """_get_mapped_datasets walks the dataset tree, yielding the path to each
    dataset whose match_tags accepts the tags; children below a non-matching
    dataset are not reported (dataset1c never appears in the result)."""
    FakeDataSet = namedtuple("DataSet", ["name", "match_tags", "child_dataset"])
    matching_leaf = FakeDataSet("dataset1a", lambda tags: True, [])
    hidden_leaf = FakeDataSet("dataset1c", lambda tags: True, [])
    non_matching_child = FakeDataSet("dataset1b", lambda tags: False, [hidden_leaf])
    root1 = FakeDataSet(
        "dataset1", lambda tags: True, [matching_leaf, non_matching_child]
    )
    root2 = FakeDataSet("dataset2", lambda tags: True, [])
    with patch.object(DataModel, "__init__", return_value=None):
        model = DataModel({})
        result = list(model._get_mapped_datasets([], [root1, root2]))
    assert result == [[root1], [root1, matching_leaf], [root2]]
Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
import pytest
2+
from unittest.mock import MagicMock, call, patch, PropertyMock
3+
from collections import namedtuple
4+
from pytest_splunk_addon.standard_lib.cim_tests.data_model_handler import (
5+
DataModelHandler,
6+
)
7+
8+
9+
@pytest.fixture()
def listdir_mock(monkeypatch):
    """Fake os.listdir: two JSON data-model files plus one non-JSON file."""
    fake_listing = MagicMock(return_value=["model1.json", "model2.xml", "model3.json"])
    monkeypatch.setattr("os.listdir", fake_listing)
15+
16+
17+
@pytest.fixture()
def data_model_mock(monkeypatch):
    """Patch DataModel in the handler module; each call returns the next sentinel."""
    mocked_class = MagicMock()
    mocked_class.side_effect = ["data_model_instance_1", "data_model_instance_2"]
    monkeypatch.setattr(
        "pytest_splunk_addon.standard_lib.cim_tests.data_model_handler.DataModel",
        mocked_class,
    )
    return mocked_class
25+
26+
27+
@pytest.fixture()
def json_schema_mock(monkeypatch):
    """Patch JSONSchema; parse_data_model returns the next parsed sentinel."""
    mocked_schema = MagicMock()
    mocked_schema.parse_data_model.side_effect = [
        "parsed_data_model_1",
        "parsed_data_model_2",
    ]
    monkeypatch.setattr(
        "pytest_splunk_addon.standard_lib.cim_tests.data_model_handler.JSONSchema",
        mocked_schema,
    )
    return mocked_schema
35+
36+
37+
def test_data_models():
    """The data_models property materializes load_data_models() into a list
    and stores it on the handler as _data_models."""
    expected = ["data_model_1", "data_model_2", "data_model_3"]
    with patch.object(
        DataModelHandler, "load_data_models", return_value=iter(expected)
    ):
        handler = DataModelHandler("/fake_path")
        assert handler.data_models == expected
        assert handler._data_models == expected
46+
47+
48+
def test_load_data_model(listdir_mock, data_model_mock, json_schema_mock):
    """load_data_models parses every .json file in the folder into a DataModel
    and skips non-JSON entries (model2.xml is never parsed)."""
    handler = DataModelHandler("/fake_path")
    loaded = list(handler.load_data_models("/fake_path/data_models"))
    assert loaded == ["data_model_instance_1", "data_model_instance_2"]
    data_model_mock.assert_has_calls(
        [call("parsed_data_model_1"), call("parsed_data_model_2")]
    )
    json_schema_mock.parse_data_model.assert_has_calls(
        [
            call("/fake_path/data_models/model1.json"),
            call("/fake_path/data_models/model3.json"),
        ]
    )
63+
64+
65+
def test_get_all_tags_per_stanza():
    """_get_all_tags_per_stanza groups the parser's tag dicts by stanza name,
    preserving the order in which tags for a stanza appear."""
    handler = DataModelHandler("/fake_path")
    FakeAddonParser = namedtuple("AddonParser", ["get_tags"])
    parser = FakeAddonParser(
        lambda: [
            {
                "stanza": "test_stanza_1",
                "tag": {"tag1_key": "tag1_value", "tag2_key": "tag2_value"},
            },
            {"stanza": "test_stanza_2", "tag": {"tag3_key": "tag3_value"}},
            {"stanza": "test_stanza_1", "tag": {"tag4_key": "tag4_value"}},
        ]
    )
    expected = {
        "test_stanza_1": [
            {"tag1_key": "tag1_value", "tag2_key": "tag2_value"},
            {"tag4_key": "tag4_value"},
        ],
        "test_stanza_2": [{"tag3_key": "tag3_value"}],
    }
    assert handler._get_all_tags_per_stanza(parser) == expected
85+
86+
87+
def test_get_mapped_data_models(caplog):
    """get_mapped_data_models yields a (stanza, tags) pair for every dataset
    some data model maps, and logs a message for stanzas no model maps
    (stanza_2 here)."""
    FakeDataModel = namedtuple("DataModel", ["get_mapped_datasets"])
    models = [
        FakeDataModel(lambda tags: tags if len(tags) == 1 else []),
        FakeDataModel(lambda tags: []),
        FakeDataModel(lambda tags: tags if len(tags) == 3 else []),
    ]
    tags_per_stanza = {
        "stanza_1": [{"tag_key_1a": "tag_value_1a", "tag_key_1b": "tag_value_1b"}],
        "stanza_2": [
            {"tag_key_2a": "tag_value_2a", "tag_key_2b": "tag_value_2b"},
            None,
        ],
        "stanza_3": [
            {"tag_key_3a": "tag_value_3a"},
            {"tag_key_3b": "tag_value_3b"},
            {"tag_key_3c": "tag_value_3c"},
        ],
    }
    with patch.object(
        DataModelHandler, "_get_all_tags_per_stanza", return_value=tags_per_stanza
    ):
        with patch.object(
            DataModelHandler,
            "data_models",
            new_callable=PropertyMock,
            return_value=models,
        ):
            handler = DataModelHandler("/fake_path")
            mapped = list(handler.get_mapped_data_models("addon_parser"))
    assert mapped == [
        (
            "stanza_1",
            {"tag_key_1a": "tag_value_1a", "tag_key_1b": "tag_value_1b"},
        ),
        ("stanza_3", {"tag_key_3a": "tag_value_3a"}),
        ("stanza_3", {"tag_key_3b": "tag_value_3b"}),
        ("stanza_3", {"tag_key_3c": "tag_value_3c"}),
    ]
    assert "No Data Model mapped for stanza_2" in caplog.messages
Lines changed: 125 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,125 @@
1+
import pytest
2+
from unittest.mock import patch, call
3+
from collections import namedtuple
4+
from pytest_splunk_addon.standard_lib.cim_tests.data_set import DataSet
5+
6+
# Minimal stand-in for the Field objects DataSet stores; tests compare on name only.
field = namedtuple("Field", ["name"])
7+
8+
9+
@pytest.fixture()
def mocked_dataset_constructor():
    """Construct a DataSet with every parsing helper stubbed to a sentinel,
    so instantiation tests can check attribute wiring in isolation."""
    # Imported here (as in the original) so the patch targets the class
    # actually referenced by the data_set module.
    from pytest_splunk_addon.standard_lib.cim_tests.data_set import Field

    patched_children = patch.object(
        DataSet, "load_dataset", return_value=("child_dataset1", "child_dataset2")
    )
    patched_fields = patch.object(
        Field, "parse_fields", return_value=("field1", "field2")
    )
    patched_cluster = patch.object(
        DataSet,
        "_parse_fields_cluster",
        return_value="parse_field_cluster_return_value",
    )
    # NOTE(review): sentinel spelling ("prase") is intentional — the
    # instantiation test asserts this exact string.
    patched_constraint = patch.object(
        DataSet, "_parse_constraint", return_value="prase_constraints_return_value"
    )
    with patched_children, patched_fields, patched_cluster, patched_constraint:
        return DataSet(
            {
                "name": "dataset1",
                "tags": ["test_tag1", "test_tag2"],
                "child_dataset": [],
                "fields": [{"name": "app"}],
                "fields_cluster": [],
                "search_constraints": "tag=alert",
            },
            "test_data_model",
        )
35+
36+
37+
@pytest.fixture()
def dataset_mock():
    """A DataSet built without running __init__, pre-loaded with a name,
    six fields, and two tag groups for the matching/cluster tests."""
    with patch.object(DataSet, "__init__", return_value=None):
        dataset = DataSet({}, "test_data_model")
    dataset.name = "dataset2"
    dataset.fields = [
        field(name)
        for name in ("bytes", "bytes_in", "bytes_out", "dest", "dest_ip", "dest_mac")
    ]
    dataset.tags = [["tag1", "tag1b"], ["tag2", "tag3"]]
    return dataset
52+
53+
54+
def test_dataset_can_be_loaded():
    """load_dataset constructs one DataSet per raw dataset dict."""
    raw_datasets = [
        {"name": "dataset1", "tags": ["tag1"]},
        {"name": "dataset2", "tags": ["tag2"]},
    ]
    with patch.object(DataSet, "__init__", return_value=None) as init_mock:
        # Consume the result so the constructor calls actually happen.
        list(DataSet.load_dataset(raw_datasets, "test"))
        init_mock.assert_has_calls(
            [
                call({"name": "dataset1", "tags": ["tag1"]}, "test"),
                call({"name": "dataset2", "tags": ["tag2"]}, "test"),
            ]
        )
71+
72+
73+
def test_dataset_instantiation(mocked_dataset_constructor):
    """The constructor stores each value produced by its (stubbed) helpers."""
    dataset = mocked_dataset_constructor
    assert dataset.name == "dataset1"
    assert dataset.tags == ["test_tag1", "test_tag2"]
    assert dataset.data_model == "test_data_model"
    assert dataset.child_dataset == ["child_dataset1", "child_dataset2"]
    assert dataset.fields == ["field1", "field2"]
    assert dataset.fields_cluster == "parse_field_cluster_return_value"
    assert dataset.search_constraints == "prase_constraints_return_value"
89+
90+
91+
def test_dataset_string(mocked_dataset_constructor):
    """str() of a DataSet is its name."""
    assert str(mocked_dataset_constructor) == "dataset1"
    assert mocked_dataset_constructor.name == "dataset1"
95+
96+
97+
def test_parse_constraints():
    """_parse_constraint passes the constraint through unchanged."""
    constraint = "constraints"
    assert DataSet._parse_constraint(constraint) == constraint
99+
100+
101+
def test_parse_fields_cluster(dataset_mock):
    """Each cluster of field names resolves to the matching Field objects."""
    clusters = [["bytes", "bytes_in", "bytes_out"], ["dest", "dest_ip", "dest_mac"]]
    expected = [
        [field("bytes"), field("bytes_in"), field("bytes_out")],
        [field("dest"), field("dest_ip"), field("dest_mac")],
    ]
    assert dataset_mock._parse_fields_cluster(clusters) == expected
108+
109+
110+
def test_parse_fields_raises_error_when_cluster_field_not_in_fields_list(dataset_mock):
    """A cluster naming a field missing from the dataset's fields raises."""
    bad_cluster = [["bytes", "bytes_in", "bytes_out", "bytes_all"]]
    with pytest.raises(
        AssertionError,
        match="Dataset=dataset2, Each cluster field should be included in fields list",
    ):
        dataset_mock._parse_fields_cluster(bad_cluster)
118+
119+
120+
def test_tags_match(dataset_mock):
    """match_tags returns True for this tag combination (see dataset_mock.tags)."""
    matching_tags = ["tag1", "tag1a", "tag2", "tag3"]
    assert dataset_mock.match_tags(matching_tags) is True
122+
123+
124+
def test_tags_not_match(dataset_mock):
    """When the tags do not satisfy the dataset, match_tags returns None."""
    non_matching_tags = ["tag1", "tag1a", "tag2"]
    assert dataset_mock.match_tags(non_matching_tags) is None

0 commit comments

Comments
 (0)