# Cerbero-mcp/src/cerbero_mcp/common/http.py
"""HTTP client factory con retry/backoff su errori transient.
Wrap leggero attorno a httpx.AsyncClient: aggiunge AsyncHTTPTransport
con retries=N per gestire connection errors / DNS / refused. Per retry
su 5xx HTTP response usa `call_with_retry()` (helper separato).
Usage standard:
async with async_client(timeout=15) as http:
resp = await http.get(url)
Equivalente a httpx.AsyncClient(timeout=15) ma con retry transport su
errori di livello connessione.
"""
from __future__ import annotations
import asyncio
import logging
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar
import httpx
logger = logging.getLogger(__name__)  # module-level logger, named after the module
T = TypeVar("T")  # result type of the awaited callable passed to call_with_retry
DEFAULT_RETRIES = 3  # transport-level connection retries used by async_client()
DEFAULT_TIMEOUT = 15.0  # seconds; default overall timeout for async_client()
def async_client(
    *,
    timeout: float = DEFAULT_TIMEOUT,
    retries: int = DEFAULT_RETRIES,
    follow_redirects: bool = False,
    **kwargs: Any,
) -> httpx.AsyncClient:
    """Build an ``httpx.AsyncClient`` with ``AsyncHTTPTransport(retries=N)``.

    The transport-level ``retries`` covers connection errors / refused
    connections / DNS failures — it does NOT retry 5xx HTTP responses.
    Any extra ``kwargs`` are forwarded to ``httpx.AsyncClient`` unchanged.
    """
    return httpx.AsyncClient(
        timeout=timeout,
        follow_redirects=follow_redirects,
        transport=httpx.AsyncHTTPTransport(retries=retries),
        **kwargs,
    )
async def call_with_retry(
    fn: Callable[[], Awaitable[T]],
    *,
    max_attempts: int = 3,
    base_delay: float = 0.5,
    max_delay: float = 8.0,
    retry_on: tuple[type[BaseException], ...] = (httpx.TransportError, httpx.TimeoutException),
) -> T:
    """Generic async retry with exponential backoff.

    Re-invokes ``fn()`` whenever it raises one of the exception types in
    ``retry_on``. The delay doubles on each retry (0.5, 1, 2, 4, ...),
    clipped to ``max_delay``. The last exception propagates once
    ``max_attempts`` is exhausted; exceptions NOT listed in ``retry_on``
    propagate immediately.

    Also usable with synchronous SDKs wrapped in ``asyncio.to_thread``
    (pybit, alpaca):

        result = await call_with_retry(lambda: client._run(self._http.get_tickers, ...))

    Args:
        fn: Zero-argument callable returning an awaitable; re-invoked on
            each attempt.
        max_attempts: Total number of attempts, including the first (>= 1).
        base_delay: Initial backoff delay in seconds.
        max_delay: Upper bound for the backoff delay in seconds.
        retry_on: Exception types that trigger a retry.

    Returns:
        The result of the first successful ``fn()`` call.

    Raises:
        ValueError: If ``max_attempts`` is less than 1.
        BaseException: The last exception from ``fn()`` when every attempt
            failed, or any exception not covered by ``retry_on``.
    """
    # Fail fast on a nonsensical attempt count: previously the loop fell
    # through with zero attempts and tripped `assert last_exc is not None`
    # (and under `-O` would have executed `raise None` -> TypeError).
    if max_attempts < 1:
        raise ValueError(f"max_attempts must be >= 1, got {max_attempts}")
    delay = base_delay
    for attempt in range(1, max_attempts + 1):
        try:
            return await fn()
        except retry_on as exc:
            if attempt == max_attempts:
                # Out of attempts: re-raise the current exception with its
                # original traceback — no `last_exc` bookkeeping needed.
                raise
            logger.warning(
                "transient error, retrying (%d/%d) in %.1fs: %s",
                attempt, max_attempts, delay, type(exc).__name__,
            )
            await asyncio.sleep(delay)
            delay = min(delay * 2, max_delay)
    # Unreachable: max_attempts >= 1 guarantees the loop returns or raises.
    raise RuntimeError("unreachable: call_with_retry loop exited without result")