15 changes: 5 additions & 10 deletions comfy_api_nodes/util/client.py
@@ -2,7 +2,6 @@
 import contextlib
 import json
 import logging
-import socket
 import time
 import uuid
 from dataclasses import dataclass
@@ -456,24 +455,20 @@ async def _diagnose_connectivity() -> dict[str, bool]:
     results = {
         "internet_accessible": False,
         "api_accessible": False,
-        "is_local_issue": False,
-        "is_api_issue": False,
     }
     timeout = aiohttp.ClientTimeout(total=5.0)
     async with aiohttp.ClientSession(timeout=timeout) as session:
-        try:
+        with contextlib.suppress(ClientError, OSError):
             async with session.get("https://www.google.com") as resp:
                 results["internet_accessible"] = resp.status < 500
-        except (ClientError, asyncio.TimeoutError, socket.gaierror):
-            results["is_local_issue"] = True
+        if not results["internet_accessible"]:
+            return results

         parsed = urlparse(default_base_url())
         health_url = f"{parsed.scheme}://{parsed.netloc}/health"
-        with contextlib.suppress(ClientError, asyncio.TimeoutError):
+        with contextlib.suppress(ClientError, OSError):
             async with session.get(health_url) as resp:
                 results["api_accessible"] = resp.status < 500
-    results["is_api_issue"] = results["internet_accessible"] and not results["api_accessible"]
     return results
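With the is_local_issue and is_api_issue flags gone, callers now derive the same distinction from the two remaining keys. A minimal sketch of how the helper can be exercised, mirroring how the retry paths below interpret the result; _diagnose_connectivity is a private helper and is imported here only for illustration:

import asyncio

from comfy_api_nodes.util.client import _diagnose_connectivity


async def main() -> None:
    diag = await _diagnose_connectivity()
    if not diag["internet_accessible"]:
        # What is_local_issue used to signal: no internet access at all.
        print("Local network problem: nothing is reachable.")
    elif not diag["api_accessible"]:
        # What is_api_issue used to signal: internet is up, API is not.
        print("Internet is up, but the API /health endpoint is unreachable.")
    else:
        print("Both the internet and the API appear reachable.")


asyncio.run(main())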


@@ -790,7 +785,7 @@ async def _monitor(stop_evt: asyncio.Event, start_ts: float):
         except ProcessingInterrupted:
             logging.debug("Polling was interrupted by user")
             raise
-        except (ClientError, asyncio.TimeoutError, socket.gaierror) as e:
+        except (ClientError, OSError) as e:
             if attempt <= cfg.max_retries:
                 logging.warning(
                     "Connection error calling %s %s. Retrying in %.2fs (%d/%d): %s",
@@ -824,7 +819,7 @@ async def _monitor(stop_evt: asyncio.Event, start_ts: float):
                 delay *= cfg.retry_backoff
                 continue
             diag = await _diagnose_connectivity()
-            if diag.get("is_local_issue"):
+            if not diag["internet_accessible"]:
                 try:
                     request_logger.log_request_response(
                         operation_id=operation_id,
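The broader except (ClientError, OSError) presumably covers everything the old tuple did, as far as the standard exception hierarchy goes: socket.gaierror has always derived from OSError, and since Python 3.11 asyncio.TimeoutError is an alias of the builtin TimeoutError, which is also an OSError subclass. A quick interpreter check, not part of the PR:

import asyncio
import socket

# DNS resolution errors are OSError subclasses on every supported Python.
assert issubclass(socket.gaierror, OSError)

# Since Python 3.11, asyncio.TimeoutError is the builtin TimeoutError,
# itself an OSError subclass; this assert fails on 3.10 and earlier.
assert issubclass(asyncio.TimeoutError, OSError)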
6 changes: 3 additions & 3 deletions comfy_api_nodes/util/download_helpers.py
@@ -32,7 +32,7 @@ async def download_url_to_bytesio(
     dest: Optional[Union[BytesIO, IO[bytes], str, Path]],
     *,
     timeout: Optional[float] = None,
-    max_retries: int = 3,
+    max_retries: int = 5,
     retry_delay: float = 1.0,
     retry_backoff: float = 2.0,
     cls: type[COMFY_IO.ComfyNode] = None,
@@ -177,7 +177,7 @@ async def _monitor():
                 return
             except asyncio.CancelledError:
                 raise ProcessingInterrupted("Task cancelled") from None
-            except (ClientError, asyncio.TimeoutError) as e:
+            except (ClientError, OSError) as e:
                 if attempt <= max_retries:
                     with contextlib.suppress(Exception):
                         request_logger.log_request_response(
@@ -191,7 +191,7 @@
                     continue

                 diag = await _diagnose_connectivity()
-                if diag.get("is_local_issue"):
+                if not diag["internet_accessible"]:
                     raise LocalNetworkError(
                         "Unable to connect to the network. Please check your internet connection and try again."
                     ) from e
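For context, a hypothetical call site that separates the LocalNetworkError raised above from other failures might look like the sketch below. The positional url parameter and the import path for LocalNetworkError are assumptions; only dest and the keyword arguments are visible in this diff.

import logging
from io import BytesIO

from comfy_api_nodes.util.download_helpers import download_url_to_bytesio
# Import path assumed; adjust to wherever LocalNetworkError is actually defined.
from comfy_api_nodes.util.client import LocalNetworkError


async def fetch_asset(url: str) -> BytesIO:
    buf = BytesIO()
    try:
        # max_retries defaults to 5 after this PR; passed explicitly for clarity.
        await download_url_to_bytesio(url, buf, max_retries=5)
    except LocalNetworkError:
        # Raised when _diagnose_connectivity reports no internet access at all,
        # as opposed to the API endpoint alone being unreachable.
        logging.error("Local network problem; check your internet connection.")
        raise
    return buf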
4 changes: 2 additions & 2 deletions comfy_api_nodes/util/upload_helpers.py
@@ -290,7 +290,7 @@ async def _monitor():
                 return
             except asyncio.CancelledError:
                 raise ProcessingInterrupted("Task cancelled") from None
-            except (aiohttp.ClientError, asyncio.TimeoutError) as e:
+            except (aiohttp.ClientError, OSError) as e:
                 if attempt <= max_retries:
                     with contextlib.suppress(Exception):
                         request_logger.log_request_response(
@@ -313,7 +313,7 @@
                     continue

                 diag = await _diagnose_connectivity()
-                if diag.get("is_local_issue"):
+                if not diag["internet_accessible"]:
                     raise LocalNetworkError(
                         "Unable to connect to the network. Please check your internet connection and try again."
                     ) from e