Skip to content

Commit

Permalink
fix: change cache of ArchiveField (#55)
Browse files Browse the repository at this point in the history
These changes are similar to codecov/codecov-api#72
Same reasoning applies.

The test change happens because we now update the cache on write. Because of that, we
no longer perform the encode/decode/rehydrate operations, so the data you put in is the data you get.
On one hand, we could perform those operations to guarantee consistency. On the other hand, this is no different
from what we used before `ArchiveField`, and such operations might be expensive.
  • Loading branch information
giovanni-guidini committed Aug 11, 2023
1 parent 3ee8958 commit 319753b
Show file tree
Hide file tree
Showing 6 changed files with 40 additions and 75 deletions.
16 changes: 2 additions & 14 deletions database/tests/unit/test_model_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -130,17 +130,5 @@ def test_archive_setter_archive_field(self, db, mocker):
assert test_class._archive_field == None
assert test_class._archive_field_storage_path == "path/to/written/object"
assert test_class.archive_field == some_json
# Writing cleans the cache
assert mock_read_file.call_count == 1
mock_read_file.assert_called_with("path/to/written/object")
mock_write_file.assert_called_with(
commit_id=commit.commitid,
table="test_table",
field="archive_field",
external_id=test_class.external_id,
data=some_json,
encoder=ReportEncoder,
)
mock_archive_service.return_value.delete_file.assert_called_with(
"path/to/old/data"
)
# Cache is updated on write
assert mock_read_file.call_count == 0
11 changes: 4 additions & 7 deletions database/tests/unit/test_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -403,14 +403,11 @@ def test_set_files_array_to_storage(self, dbsession, mocker, mock_configuration)
# Retrieve the set value
files_array = retrieved_instance.files_array
assert files_array == self.sample_files_array
assert mock_archive_service.call_count == 2
mock_archive_service.return_value.read_file.assert_has_calls(
[call("https://storage-url/path/to/item.json")]
)
# Check that caching (still) works within the instance
assert mock_archive_service.call_count == 1
# Check that caching works within the instance
files_array = retrieved_instance.files_array
assert mock_archive_service.call_count == 2
assert mock_archive_service.return_value.read_file.call_count == 1
assert mock_archive_service.call_count == 1
assert mock_archive_service.return_value.read_file.call_count == 0


class TestCommitModel(object):
Expand Down
14 changes: 10 additions & 4 deletions database/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -76,8 +76,8 @@ def __set_name__(self, owner, name):
self.public_name = name
self.db_field_name = "_" + name
self.archive_field_name = "_" + name + "_storage_path"
self.cached_value_property_name = f"__{self.public_name}_cached_value"

@lru_cache(maxsize=1)
def _get_value_from_archive(self, obj):
repository = obj.get_repository()
archive_service = ArchiveService(repository=repository)
Expand Down Expand Up @@ -106,10 +106,16 @@ def _get_value_from_archive(self, obj):
return self.default_value

def __get__(self, obj, objtype=None):
    """Return the archive-backed field value, preferring the per-instance cache.

    Resolution order:
      1. the value previously memoized on ``obj`` (set here or by ``__set__``),
      2. the database column, rehydrated via ``rehydrate_fn``,
      3. the archive storage backend (``_get_value_from_archive``).

    The resolved value is memoized on the instance before being returned, so
    repeated reads within one instance do not hit the DB/archive again.
    """
    # Use a sentinel instead of truthiness: a legitimately falsy cached value
    # (0, "", [], {}, or a cached default of None) must still count as a cache
    # hit. Testing `if cached_value:` would silently re-read from the
    # DB/archive on every access for such values, defeating the cache.
    _missing = object()
    cached_value = getattr(obj, self.cached_value_property_name, _missing)
    if cached_value is not _missing:
        return cached_value
    db_field = getattr(obj, self.db_field_name)
    if db_field is not None:
        value = self.rehydrate_fn(obj, db_field)
    else:
        value = self._get_value_from_archive(obj)
    # Memoize on the instance (not the descriptor) so the cache is per-object.
    setattr(obj, self.cached_value_property_name, value)
    return value

def __set__(self, obj, value):
# Set the new value
Expand All @@ -130,6 +136,6 @@ def __set__(self, obj, value):
archive_service.delete_file(old_file_path)
setattr(obj, self.archive_field_name, path)
setattr(obj, self.db_field_name, None)
self._get_value_from_archive.cache_clear()
else:
setattr(obj, self.db_field_name, value)
setattr(obj, self.cached_value_property_name, value)
2 changes: 1 addition & 1 deletion requirements.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
git+ssh://[email protected]/codecov/shared.git@39f6f4a383d37755a69d4dfbe40390fcac45536b#egg=shared
git+ssh://[email protected]/codecov/shared.git@6a8a33248804a9c101d34f417efda7c11e4bbe63#egg=shared
git+ssh://[email protected]/codecov/[email protected]#egg=codecovopentelem
boto3
celery
Expand Down
12 changes: 8 additions & 4 deletions requirements.txt
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
#
# This file is autogenerated by pip-compile with python 3.10
# To update, run:
# This file is autogenerated by pip-compile with Python 3.10
# by the following command:
#
# pip-compile requirements.in
#
Expand Down Expand Up @@ -77,6 +77,8 @@ deprecated==1.2.12
# via opentelemetry-api
ecdsa==0.16.1
# via tlslite-ng
exceptiongroup==1.1.2
# via pytest
factory-boy==3.2.0
# via -r requirements.in
faker==8.8.2
Expand Down Expand Up @@ -249,7 +251,7 @@ s3transfer==0.3.4
# via boto3
sentry-sdk==1.19.1
# via -r requirements.in
shared @ git+ssh://[email protected]/codecov/shared.git@39f6f4a383d37755a69d4dfbe40390fcac45536b
shared @ git+ssh://[email protected]/codecov/shared.git@6a8a33248804a9c101d34f417efda7c11e4bbe63
# via -r requirements.in
six==1.15.0
# via
Expand Down Expand Up @@ -284,8 +286,10 @@ text-unidecode==1.3
# via faker
timestring==1.6.4
# via -r requirements.in
tlslite-ng==0.8.0-alpha39
tlslite-ng==0.8.0a39
# via shared
tomli==2.0.1
# via pytest
typing==3.7.4.3
# via shared
typing-extensions==4.6.3
Expand Down
60 changes: 15 additions & 45 deletions services/tests/test_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -4037,9 +4037,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_00.py",
"file_index": 0,
"file_totals": ReportTotals(
*[0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 14, 12, 0, 2, "85.71429", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4053,9 +4051,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_01.py",
"file_index": 1,
"file_totals": ReportTotals(
*[0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 11, 8, 0, 3, "72.72727", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4069,9 +4065,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_10.py",
"file_index": 10,
"file_totals": ReportTotals(
*[0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 10, 6, 1, 3, "60.00000", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4085,9 +4079,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_11.py",
"file_index": 11,
"file_totals": ReportTotals(
*[0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 23, 15, 1, 7, "65.21739", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4101,9 +4093,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_12.py",
"file_index": 12,
"file_totals": ReportTotals(
*[0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 14, 8, 0, 6, "57.14286", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4117,9 +4107,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_13.py",
"file_index": 13,
"file_totals": ReportTotals(
*[0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 15, 9, 0, 6, "60.00000", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4133,9 +4121,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_14.py",
"file_index": 14,
"file_totals": ReportTotals(
*[0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 23, 13, 0, 10, "56.52174", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4149,9 +4135,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_02.py",
"file_index": 2,
"file_totals": ReportTotals(
*[0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 13, 9, 0, 4, "69.23077", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4165,9 +4149,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_03.py",
"file_index": 3,
"file_totals": ReportTotals(
*[0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 16, 8, 0, 8, "50.00000", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4181,9 +4163,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_04.py",
"file_index": 4,
"file_totals": ReportTotals(
*[0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 10, 6, 0, 4, "60.00000", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4197,9 +4177,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_05.py",
"file_index": 5,
"file_totals": ReportTotals(
*[0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 14, 10, 0, 4, "71.42857", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4213,9 +4191,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_06.py",
"file_index": 6,
"file_totals": ReportTotals(
*[0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 9, 7, 1, 1, "77.77778", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4229,9 +4205,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_07.py",
"file_index": 7,
"file_totals": ReportTotals(
*[0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 11, 9, 0, 2, "81.81818", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4245,9 +4219,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_08.py",
"file_index": 8,
"file_totals": ReportTotals(
*[0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 11, 6, 0, 5, "54.54545", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand All @@ -4261,9 +4233,7 @@ async def test_initialize_and_save_report_needs_backporting(
{
"filename": "file_09.py",
"file_index": 9,
"file_totals": ReportTotals(
*[0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0]
),
"file_totals": [0, 14, 10, 1, 3, "71.42857", 0, 0, 0, 0, 0, 0, 0],
"session_totals": SessionTotalsArray.build_from_encoded_data(
[
None,
Expand Down

0 comments on commit 319753b

Please sign in to comment.