diff --git a/src/program/services/scrapers/jackett.py b/src/program/services/scrapers/jackett.py
index 2f834185..c3a129b7 100644
--- a/src/program/services/scrapers/jackett.py
+++ b/src/program/services/scrapers/jackett.py
@@ -15,7 +15,7 @@
 from program.services.scrapers.shared import ScraperRequestHandler
 from program.settings.manager import settings_manager
 from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod, \
-    ResponseType
+    ResponseType, get_http_adapter
 
 
 class JackettIndexer(BaseModel):
@@ -63,7 +63,8 @@ def validate(self) -> bool:
 
                 self.indexers = indexers
                 rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), period=2) if self.settings.ratelimit else None
-                session = create_service_session(rate_limit_params=rate_limit_params)
+                http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers))
+                session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter)
                 self.request_handler = ScraperRequestHandler(session)
                 self._log_indexers()
                 return True
diff --git a/src/program/services/scrapers/prowlarr.py b/src/program/services/scrapers/prowlarr.py
index 709b16a5..31191149 100644
--- a/src/program/services/scrapers/prowlarr.py
+++ b/src/program/services/scrapers/prowlarr.py
@@ -15,7 +15,8 @@
 from program.media.item import Episode, MediaItem, Movie, Season, Show
 from program.services.scrapers.shared import ScraperRequestHandler
 from program.settings.manager import settings_manager
-from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod
+from program.utils.request import create_service_session, get_rate_limit_params, RateLimitExceeded, HttpMethod, \
+    get_http_adapter
 
 
 class ProwlarrIndexer(BaseModel):
@@ -63,7 +63,8 @@ def validate(self) -> bool:
                     return False
                 self.indexers = indexers
                 rate_limit_params = get_rate_limit_params(max_calls=len(self.indexers), period=self.settings.limiter_seconds) if self.settings.ratelimit else None
-                session = create_service_session(rate_limit_params=rate_limit_params)
+                http_adapter = get_http_adapter(pool_connections=len(self.indexers), pool_maxsize=len(self.indexers))
+                session = create_service_session(rate_limit_params=rate_limit_params, session_adapter=http_adapter)
                 self.request_handler = ScraperRequestHandler(session)
                 self._log_indexers()
                 return True
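
Note: the change above sizes the requests connection pool to the number of configured indexers, presumably so that concurrent indexer queries are not serialized behind the default HTTPAdapter pool size of 10. The sketch below only illustrates what helpers like get_http_adapter and create_service_session are assumed to do (build an HTTPAdapter with the given pool sizes and mount it on the session); the actual implementations live in program.utils.request and may differ.

# Illustrative sketch only -- assumed behaviour of the helpers used in the diff,
# not the project's actual code in program.utils.request.
from typing import Optional

from requests import Session
from requests.adapters import HTTPAdapter


def get_http_adapter(pool_connections: int = 10, pool_maxsize: int = 10) -> HTTPAdapter:
    # HTTPAdapter keeps up to pool_connections per-host pools and allows up to
    # pool_maxsize concurrent connections per pool (both default to 10).
    return HTTPAdapter(pool_connections=pool_connections, pool_maxsize=pool_maxsize)


def create_service_session(rate_limit_params: Optional[dict] = None,
                           session_adapter: Optional[HTTPAdapter] = None) -> Session:
    # Rate limiting is omitted here; only the adapter mounting is shown.
    session = Session()
    if session_adapter is not None:
        session.mount("http://", session_adapter)
        session.mount("https://", session_adapter)
    return session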