From 319753b408dd87b9ed529f20257e9b4c6442f84d Mon Sep 17 00:00:00 2001 From: Giovanni M Guidini <99758426+giovanni-guidini@users.noreply.github.com> Date: Fri, 11 Aug 2023 17:42:24 +0200 Subject: [PATCH] fix: change cache of `ArchiveField` (#55) These changes are similar to https://github.com/codecov/codecov-api/pull/72 Same reasoning applies. The test change happens because now we update the cache on the write. Because of that we are not doing the encode/decode/rehydrate operations. So the data you put in is the data you get. On one hand we can do such operations to guarantee consistency. On the other this is no different than what we used before `ArchiveField` AND such operations might be expensive. --- database/tests/unit/test_model_utils.py | 16 +------ database/tests/unit/test_models.py | 11 ++--- database/utils.py | 14 ++++-- requirements.in | 2 +- requirements.txt | 12 +++-- services/tests/test_report.py | 60 +++++++------------------ 6 files changed, 40 insertions(+), 75 deletions(-) diff --git a/database/tests/unit/test_model_utils.py b/database/tests/unit/test_model_utils.py index 43d8952a9..a8abe8f81 100644 --- a/database/tests/unit/test_model_utils.py +++ b/database/tests/unit/test_model_utils.py @@ -130,17 +130,5 @@ def test_archive_setter_archive_field(self, db, mocker): assert test_class._archive_field == None assert test_class._archive_field_storage_path == "path/to/written/object" assert test_class.archive_field == some_json - # Writing cleans the cache - assert mock_read_file.call_count == 1 - mock_read_file.assert_called_with("path/to/written/object") - mock_write_file.assert_called_with( - commit_id=commit.commitid, - table="test_table", - field="archive_field", - external_id=test_class.external_id, - data=some_json, - encoder=ReportEncoder, - ) - mock_archive_service.return_value.delete_file.assert_called_with( - "path/to/old/data" - ) + # Cache is updated on write + assert mock_read_file.call_count == 0 diff --git 
a/database/tests/unit/test_models.py b/database/tests/unit/test_models.py index 4fe8a643b..287cf56c4 100644 --- a/database/tests/unit/test_models.py +++ b/database/tests/unit/test_models.py @@ -403,14 +403,11 @@ def test_set_files_array_to_storage(self, dbsession, mocker, mock_configuration) # Retrieve the set value files_array = retrieved_instance.files_array assert files_array == self.sample_files_array - assert mock_archive_service.call_count == 2 - mock_archive_service.return_value.read_file.assert_has_calls( - [call("https://storage-url/path/to/item.json")] - ) - # Check that caching (still) works within the instance + assert mock_archive_service.call_count == 1 + # Check that caching works within the instance files_array = retrieved_instance.files_array - assert mock_archive_service.call_count == 2 - assert mock_archive_service.return_value.read_file.call_count == 1 + assert mock_archive_service.call_count == 1 + assert mock_archive_service.return_value.read_file.call_count == 0 class TestCommitModel(object): diff --git a/database/utils.py b/database/utils.py index c843c70e7..d4498d906 100644 --- a/database/utils.py +++ b/database/utils.py @@ -76,8 +76,8 @@ def __set_name__(self, owner, name): self.public_name = name self.db_field_name = "_" + name self.archive_field_name = "_" + name + "_storage_path" + self.cached_value_property_name = f"__{self.public_name}_cached_value" - @lru_cache(maxsize=1) def _get_value_from_archive(self, obj): repository = obj.get_repository() archive_service = ArchiveService(repository=repository) @@ -106,10 +106,16 @@ def _get_value_from_archive(self, obj): return self.default_value def __get__(self, obj, objtype=None): + cached_value = getattr(obj, self.cached_value_property_name, None) + if cached_value: + return cached_value db_field = getattr(obj, self.db_field_name) if db_field is not None: - return self.rehydrate_fn(obj, db_field) - return self._get_value_from_archive(obj) + value = self.rehydrate_fn(obj, db_field) + else: + 
value = self._get_value_from_archive(obj) + setattr(obj, self.cached_value_property_name, value) + return value def __set__(self, obj, value): # Set the new value @@ -130,6 +136,6 @@ def __set__(self, obj, value): archive_service.delete_file(old_file_path) setattr(obj, self.archive_field_name, path) setattr(obj, self.db_field_name, None) - self._get_value_from_archive.cache_clear() else: setattr(obj, self.db_field_name, value) + setattr(obj, self.cached_value_property_name, value) diff --git a/requirements.in b/requirements.in index 4fd9fcecc..496cb182c 100644 --- a/requirements.in +++ b/requirements.in @@ -1,4 +1,4 @@ -git+ssh://git@github.com/codecov/shared.git@39f6f4a383d37755a69d4dfbe40390fcac45536b#egg=shared +git+ssh://git@github.com/codecov/shared.git@6a8a33248804a9c101d34f417efda7c11e4bbe63#egg=shared git+ssh://git@github.com/codecov/opentelem-python.git@v0.0.4a1#egg=codecovopentelem boto3 celery diff --git a/requirements.txt b/requirements.txt index dd3a26abb..437593381 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,6 +1,6 @@ # -# This file is autogenerated by pip-compile with python 3.10 -# To update, run: +# This file is autogenerated by pip-compile with Python 3.10 +# by the following command: # # pip-compile requirements.in # @@ -77,6 +77,8 @@ deprecated==1.2.12 # via opentelemetry-api ecdsa==0.16.1 # via tlslite-ng +exceptiongroup==1.1.2 + # via pytest factory-boy==3.2.0 # via -r requirements.in faker==8.8.2 @@ -249,7 +251,7 @@ s3transfer==0.3.4 # via boto3 sentry-sdk==1.19.1 # via -r requirements.in -shared @ git+ssh://git@github.com/codecov/shared.git@39f6f4a383d37755a69d4dfbe40390fcac45536b +shared @ git+ssh://git@github.com/codecov/shared.git@6a8a33248804a9c101d34f417efda7c11e4bbe63 # via -r requirements.in six==1.15.0 # via @@ -284,8 +286,10 @@ text-unidecode==1.3 # via faker timestring==1.6.4 # via -r requirements.in -tlslite-ng==0.8.0-alpha39 +tlslite-ng==0.8.0a39 # via shared +tomli==2.0.1 + # via pytest typing==3.7.4.3 # via 
shared typing-extensions==4.6.3 diff --git a/services/tests/test_report.py b/services/tests/test_report.py index 83d55f60c..669d9ed37 100644 --- a/services/tests/test_report.py +++ b/services/tests/test_report.py @@ -4037,9 +4037,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_00.py", "file_index": 0, - "file_totals": ReportTotals( - *[0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4053,9 +4051,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_01.py", "file_index": 1, - "file_totals": ReportTotals( - *[0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4069,9 +4065,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_10.py", "file_index": 10, - "file_totals": ReportTotals( - *[0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4085,9 +4079,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_11.py", "file_index": 11, - "file_totals": ReportTotals( - *[0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4101,9 +4093,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_12.py", "file_index": 12, - "file_totals": ReportTotals( - *[0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ 
-4117,9 +4107,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_13.py", "file_index": 13, - "file_totals": ReportTotals( - *[0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4133,9 +4121,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_14.py", "file_index": 14, - "file_totals": ReportTotals( - *[0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4149,9 +4135,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_02.py", "file_index": 2, - "file_totals": ReportTotals( - *[0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4165,9 +4149,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_03.py", "file_index": 3, - "file_totals": ReportTotals( - *[0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4181,9 +4163,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_04.py", "file_index": 4, - "file_totals": ReportTotals( - *[0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4197,9 +4177,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_05.py", "file_index": 5, - "file_totals": ReportTotals( - *[0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0] - ), + 
"file_totals": [0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4213,9 +4191,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_06.py", "file_index": 6, - "file_totals": ReportTotals( - *[0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4229,9 +4205,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_07.py", "file_index": 7, - "file_totals": ReportTotals( - *[0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4245,9 +4219,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_08.py", "file_index": 8, - "file_totals": ReportTotals( - *[0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4261,9 +4233,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_09.py", "file_index": 9, - "file_totals": ReportTotals( - *[0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None,