diff --git a/src/backend/api/mirrors_update.py b/src/backend/api/mirrors_update.py
index cefb67bc..a7525a86 100644
--- a/src/backend/api/mirrors_update.py
+++ b/src/backend/api/mirrors_update.py
@@ -93,7 +93,7 @@ async def set_repo_status(
         ) as resp:
             timestamp_response = await resp.text()
     except (asyncio.exceptions.TimeoutError, HTTPError):
-        logger.error(
+        logger.warning(
             'Mirror "%s" has no timestamp file by url "%s"',
             mirror_info.name,
             timestamp_url,
@@ -107,7 +107,7 @@ async def set_repo_status(
     try:
         mirror_last_updated = float(timestamp_response)
     except ValueError:
-        logger.info(
+        logger.warning(
             'Mirror "%s" has broken timestamp file by url "%s"',
             mirror_info.name,
             timestamp_url,
diff --git a/src/backend/api/redis.py b/src/backend/api/redis.py
index bf18ab08..9fdd96e8 100644
--- a/src/backend/api/redis.py
+++ b/src/backend/api/redis.py
@@ -66,6 +66,37 @@ async def set_mirrors_to_cache(
     )
 
 
+async def get_subnets_from_cache(
+        key: str,
+) -> dict:
+    """
+    Get the cached subnets of the Azure/AWS clouds
+    """
+    async with redis_context() as redis_engine:
+        subnets_string = await redis_engine.get(str(key))
+        if subnets_string is not None:
+            subnets_json = json.loads(
+                subnets_string,
+            )
+            return subnets_json
+
+
+async def set_subnets_to_cache(
+        key: str,
+        subnets: dict,
+) -> None:
+    """
+    Save the subnets of the Azure/AWS clouds to cache
+    """
+    async with redis_context() as redis_engine:
+        subnets = json.dumps(subnets)
+        await redis_engine.set(
+            str(key),
+            subnets,
+            24 * 60 * 60,
+        )
+
+
 async def get_geolocation_from_cache(
         key: str
 ) -> Union[tuple[float, float], tuple[None, None]]:
diff --git a/src/backend/api/utils.py b/src/backend/api/utils.py
index 0e87b7a8..36220812 100644
--- a/src/backend/api/utils.py
+++ b/src/backend/api/utils.py
@@ -43,7 +43,9 @@ from haversine import haversine
 
 from api.redis import (
     get_geolocation_from_cache,
-    set_geolocation_to_cache
+    set_geolocation_to_cache,
+    get_subnets_from_cache,
+    set_subnets_to_cache,
 )
 
 logger = get_logger(__name__)
@@ -210,7 +212,7 @@ async def get_azure_subnets_json(http_session: ClientSession) -> Optional[dict]:
             response_json = await resp.json(
                 content_type='application/octet-stream',
             )
-    except (ClientConnectorError, TimeoutError) as err:
+    except (ClientConnectorError, asyncio.exceptions.TimeoutError) as err:
         logger.error(
             'Cannot get json with Azure subnets by url "%s" because "%s"',
             link_to_json_url,
@@ -240,6 +242,9 @@ async def get_aws_subnets_json(http_session: ClientSession) -> Optional[dict]:
 
 
 async def get_azure_subnets(http_session: ClientSession):
+    subnets = await get_subnets_from_cache('azure_subnets')
+    if subnets is not None:
+        return subnets
     data_json = await get_azure_subnets_json(http_session=http_session)
     subnets = dict()
     if data_json is None:
@@ -250,10 +255,14 @@
         properties = value['properties']
         subnets[properties['region'].lower()] = \
             properties['addressPrefixes']
+    await set_subnets_to_cache('azure_subnets', subnets)
     return subnets
 
 
 async def get_aws_subnets(http_session: ClientSession):
+    subnets = await get_subnets_from_cache('aws_subnets')
+    if subnets is not None:
+        return subnets
     data_json = await get_aws_subnets_json(http_session=http_session)
     subnets = defaultdict(list)
     if data_json is None:
@@ -262,6 +271,7 @@
         subnets[v4_prefix['region'].lower()].append(v4_prefix['ip_prefix'])
     for v6_prefix in data_json['ipv6_prefixes']:
         subnets[v6_prefix['region'].lower()].append(v6_prefix['ipv6_prefix'])
+    await set_subnets_to_cache('aws_subnets', subnets)
     return subnets
 
 
diff --git a/src/backend/yaml_snippets b/src/backend/yaml_snippets
index 1dd0a625..1f9f85f1 160000
--- a/src/backend/yaml_snippets
+++ b/src/backend/yaml_snippets
@@ -1 +1 @@
-Subproject commit 1dd0a62524ff0404fba41e3efe1064c44081771d
+Subproject commit 1f9f85f16a50f82241be3fad49e20b7c1f65f69d
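
The new helpers in api/redis.py and their callers in api/utils.py follow a cache-aside pattern: look the cloud subnets up in Redis first, and only on a miss download them, JSON-encode the result, and store it with a 24-hour expiry. The sketch below shows that pattern in isolation as a minimal, self-contained example; it assumes redis-py's asyncio client (the backend wraps its own client in redis_context()) and a local Redis on the default port, and uses a hypothetical fetch_subnets() in place of get_azure_subnets_json()/get_aws_subnets_json().

import asyncio
import json

from redis.asyncio import Redis  # assumption: redis-py >= 4.2 asyncio client

CACHE_TTL = 24 * 60 * 60  # same expiry the diff passes to redis_engine.set()


async def fetch_subnets() -> dict:
    """Hypothetical stand-in for downloading the Azure/AWS subnet JSON."""
    return {'westeurope': ['203.0.113.0/24']}


async def get_subnets_cached(redis_engine: Redis, key: str) -> dict:
    """Cache-aside lookup: on a hit decode the cached blob, on a miss fetch, store, return."""
    cached = await redis_engine.get(key)
    if cached is not None:
        return json.loads(cached)
    subnets = await fetch_subnets()
    await redis_engine.set(key, json.dumps(subnets), ex=CACHE_TTL)
    return subnets


async def main():
    # assumption: a Redis instance reachable at localhost:6379
    async with Redis(host='localhost', port=6379) as redis_engine:
        print(await get_subnets_cached(redis_engine, 'azure_subnets'))


if __name__ == '__main__':
    asyncio.run(main())

Caching the parsed subnet dict under a fixed key keeps the hot path to a single Redis GET per request, while the TTL bounds how stale the cloud subnet data can become before it is re-downloaded.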