From 37a31fc56d56bd933a8360e9c321bff5ab0cf854 Mon Sep 17 00:00:00 2001 From: Gguidini Date: Fri, 11 Aug 2023 10:00:29 +0200 Subject: [PATCH] fix: update archive field cache * Set variable cached_value property name, for the case where a single model might have multiple archived fields * Concentrate all archive getting/setting in the `__get__` and `__set__` functions The test change happens because now we update the cache on the write. Because of that we are not doing the encode/decode/rehydrate operations. So the data you put in is the data you get. On one hand we can do such operations to guarantee consistency. On the other this is no different than what we used before `ArchiveField` AND such operations might be expensive. --- database/utils.py | 16 +++++----- services/tests/test_report.py | 60 +++++++++-------------------------- 2 files changed, 23 insertions(+), 53 deletions(-) diff --git a/database/utils.py b/database/utils.py index 491febe9e..d4498d906 100644 --- a/database/utils.py +++ b/database/utils.py @@ -76,6 +76,7 @@ def __set_name__(self, owner, name): self.public_name = name self.db_field_name = "_" + name self.archive_field_name = "_" + name + "_storage_path" + self.cached_value_property_name = f"__{self.public_name}_cached_value" def _get_value_from_archive(self, obj): repository = obj.get_repository() @@ -84,9 +85,7 @@ def _get_value_from_archive(self, obj): if archive_field: try: file_str = archive_service.read_file(archive_field) - value = self.rehydrate_fn(obj, json.loads(file_str)) - setattr(obj, "__archive_field_cached_value", value) - return value + return self.rehydrate_fn(obj, json.loads(file_str)) except FileNotInStorageError: log.error( "Archive enabled field not in storage", @@ -107,15 +106,16 @@ def _get_value_from_archive(self, obj): return self.default_value def __get__(self, obj, objtype=None): - cached_value = getattr(obj, "__archive_field_cached_value", None) + cached_value = getattr(obj, self.cached_value_property_name, None) if 
cached_value: return cached_value db_field = getattr(obj, self.db_field_name) if db_field is not None: value = self.rehydrate_fn(obj, db_field) - setattr(obj, "__archive_field_cached_value", value) - return value - return self._get_value_from_archive(obj) + else: + value = self._get_value_from_archive(obj) + setattr(obj, self.cached_value_property_name, value) + return value def __set__(self, obj, value): # Set the new value @@ -138,4 +138,4 @@ def __set__(self, obj, value): setattr(obj, self.db_field_name, None) else: setattr(obj, self.db_field_name, value) - setattr(obj, "__archive_field_cached_value", value) + setattr(obj, self.cached_value_property_name, value) diff --git a/services/tests/test_report.py b/services/tests/test_report.py index 83d55f60c..669d9ed37 100644 --- a/services/tests/test_report.py +++ b/services/tests/test_report.py @@ -4037,9 +4037,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_00.py", "file_index": 0, - "file_totals": ReportTotals( - *[0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4053,9 +4051,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_01.py", "file_index": 1, - "file_totals": ReportTotals( - *[0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4069,9 +4065,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_10.py", "file_index": 10, - "file_totals": ReportTotals( - *[0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4085,9 +4079,7 @@ async def 
test_initialize_and_save_report_needs_backporting( { "filename": "file_11.py", "file_index": 11, - "file_totals": ReportTotals( - *[0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4101,9 +4093,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_12.py", "file_index": 12, - "file_totals": ReportTotals( - *[0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4117,9 +4107,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_13.py", "file_index": 13, - "file_totals": ReportTotals( - *[0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4133,9 +4121,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_14.py", "file_index": 14, - "file_totals": ReportTotals( - *[0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4149,9 +4135,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_02.py", "file_index": 2, - "file_totals": ReportTotals( - *[0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4165,9 +4149,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_03.py", "file_index": 3, - "file_totals": ReportTotals( - *[0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 16, 8, 
0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4181,9 +4163,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_04.py", "file_index": 4, - "file_totals": ReportTotals( - *[0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4197,9 +4177,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_05.py", "file_index": 5, - "file_totals": ReportTotals( - *[0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4213,9 +4191,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_06.py", "file_index": 6, - "file_totals": ReportTotals( - *[0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4229,9 +4205,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_07.py", "file_index": 7, - "file_totals": ReportTotals( - *[0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4245,9 +4219,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_08.py", "file_index": 8, - "file_totals": ReportTotals( - *[0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None, @@ -4261,9 +4233,7 @@ async def test_initialize_and_save_report_needs_backporting( { "filename": "file_09.py", 
"file_index": 9, - "file_totals": ReportTotals( - *[0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0] - ), + "file_totals": [0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0], "session_totals": SessionTotalsArray.build_from_encoded_data( [ None,