From 0c8057aef45fcccd2c855a8413729b39020439db Mon Sep 17 00:00:00 2001
From: iPromKnight <156901906+iPromKnight@users.noreply.github.com>
Date: Tue, 5 Nov 2024 23:06:11 +0000
Subject: [PATCH] =?UTF-8?q?fix:=20add=20HTTP=20adapter=20configuration=20f?=
 =?UTF-8?q?or=20Jackett=20and=20Prowlarr=20scrapers=E2=80=A6=20(#865)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

fix: add HTTP adapter configuration for Jackett and Prowlarr scrapers to
manage connection pool size
---
 src/program/services/scrapers/jackett.py  | 5 +++--
 src/program/services/scrapers/prowlarr.py | 6 ++++--
 2 files changed, 7 insertions(+), 4 deletions(-)

diff --git a/src/program/services/scrapers/jackett.py b/src/program/services/scrapers/jackett.py
index 2f834185..c3a129b7 100644
--- a/src/program/services/scrapers/jackett.py
+++ b/src/program/services/scrapers/jackett.py
@@ -15,7 +15,7 @@ from program.services.scrapers.shared import ScraperRequestHandler
 from program.settings.manager import settings_manager
 from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod, \
-    ResponseType
+    ResponseType, get_http_adapter
 
 
 class JackettIndexer(BaseModel):
@@ -63,7 +63,8 @@ def validate(self) -> bool:
         self.indexers = indexers
         rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), period=2) if self.settings.ratelimit else None
-        session = create_service_session(rate_limit_params=rate_limit_params)
+        http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers))
+        session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter)
         self.request_handler = ScraperRequestHandler(session)
         self._log_indexers()
         return True
diff --git a/src/program/services/scrapers/prowlarr.py b/src/program/services/scrapers/prowlarr.py
index 709b16a5..31191149 100644
--- a/src/program/services/scrapers/prowlarr.py
+++ b/src/program/services/scrapers/prowlarr.py
@@ -15,7 +15,8 @@ from program.media.item import Episode, MediaItem, Movie, Season, Show
 from program.services.scrapers.shared import ScraperRequestHandler
 from program.settings.manager import settings_manager
-from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod
+from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod, \
+    get_http_adapter
 
 
 class ProwlarrIndexer(BaseModel):
@@ -63,7 +64,8 @@ def validate(self) -> bool:
             return False
         self.indexers = indexers
         rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), period=self.settings.limiter_seconds) if self.settings.ratelimit else None
-        session = create_service_session(rate_limit_params=rate_limit_params)
+        http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers))
+        session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter)
         self.request_handler = ScraperRequestHandler(session)
         self._log_indexers()
         return True
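
Note: requests' default HTTPAdapter keeps a pool of 10 connections per host, so
with more indexers than that, keep-alive connections get evicted and reopened on
every scrape. Below is a minimal sketch of how a helper like get_http_adapter
could size the pool, assuming it simply wraps requests.adapters.HTTPAdapter; the
actual implementation lives in program/utils/request.py and may differ.

    from requests import Session
    from requests.adapters import HTTPAdapter


    def get_http_adapter(pool_connections: int = 10, pool_maxsize: int = 10) -> HTTPAdapter:
        # Hypothetical sketch: size the urllib3 pool so each indexer can keep
        # its own persistent connection instead of evicting others from the pool.
        return HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize)


    # Usage mirroring the change above: one pooled connection slot per indexer.
    adapter = get_http_adapter(pool_connections=20, pool_maxsize=20)
    session = Session()
    session.mount("http://", adapter)
    session.mount("https://", adapter)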