chore: httpx retry transport + healthcheck stdlib + mypy config

- mcp_common/http.py: nuovo helper async_client() con
  AsyncHTTPTransport(retries=3) per gestire connection error transient
  + call_with_retry() generic async retry decorator. Sostituite 25
  occorrenze httpx.AsyncClient(...) in deribit/hyperliquid/sentiment/
  macro client. 5 nuovi test.

- Dockerfile healthcheck: passato da python+httpx subprocess a
  stdlib urllib.request.urlopen() su tutti i 6 servizi MCP. Zero
  dipendenze esterne nel runtime check, timeout esplicito 3s, image
  leggermente più snella.

- pyproject.toml: aggiunto [tool.mypy] python_version=3.13 con
  mypy_path multi-package + override ignore_missing_imports per i
  vendor SDK (pybit, alpaca, hyperliquid, pythonjsonlogger). mypy 1.20
  in dev deps; ruff pinned 0.5.x. mcp_common passa mypy clean; 44
  errori tipo pre-esistenti nei servizi affiorati ma non bloccanti —
  fix da pianificare separatamente.

- 455 test verdi.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
AdrianoDev
2026-04-28 07:26:17 +02:00
parent 4d9db750be
commit 6b7b3f7658
15 changed files with 395 additions and 53 deletions
+85
View File
@@ -0,0 +1,85 @@
"""HTTP client factory con retry/backoff su errori transient.
Wrap leggero attorno a httpx.AsyncClient: aggiunge AsyncHTTPTransport
con retries=N per gestire connection errors / DNS / refused. Per retry
su 5xx HTTP response usa `request_with_retry()` (decoratore separato).
Usage standard:
async with async_client(timeout=15) as http:
resp = await http.get(url)
Equivalente a httpx.AsyncClient(timeout=15) ma con retry transport su
errori di livello connessione.
"""
from __future__ import annotations
import asyncio
import logging
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar
import httpx
logger = logging.getLogger(__name__)
# Generic return type of the awaited callable passed to call_with_retry.
T = TypeVar("T")
# Connection-level retries for the default AsyncHTTPTransport (not HTTP 5xx).
DEFAULT_RETRIES = 3
# Default per-request timeout in seconds for async_client().
DEFAULT_TIMEOUT = 15.0
def async_client(
    *,
    timeout: float = DEFAULT_TIMEOUT,
    retries: int = DEFAULT_RETRIES,
    follow_redirects: bool = False,
    **kwargs: Any,
) -> httpx.AsyncClient:
    """Build an ``httpx.AsyncClient`` backed by a retrying transport.

    ``retries`` only covers connection-level failures (refused
    connections, DNS errors, resets) handled by
    ``httpx.AsyncHTTPTransport`` — HTTP 5xx responses are NOT retried;
    use ``call_with_retry`` for those.
    """
    return httpx.AsyncClient(
        transport=httpx.AsyncHTTPTransport(retries=retries),
        timeout=timeout,
        follow_redirects=follow_redirects,
        **kwargs,
    )
async def call_with_retry(
    fn: Callable[[], Awaitable[T]],
    *,
    max_attempts: int = 3,
    base_delay: float = 0.5,
    max_delay: float = 8.0,
    retry_on: tuple[type[BaseException], ...] = (httpx.TransportError, httpx.TimeoutException),
) -> T:
    """Generic async retry with exponential backoff.

    Re-invokes ``fn()`` whenever it raises one of the exception types in
    ``retry_on``.  The delay doubles each attempt (0.5, 1, 2, 4, ...)
    clipped at ``max_delay``.  Once ``max_attempts`` is exhausted the
    last exception is re-raised with its original traceback.

    Also usable with sync SDKs wrapped in ``asyncio.to_thread`` (pybit,
    alpaca):

        result = await call_with_retry(lambda: client._run(self._http.get_tickers, ...))

    Raises:
        ValueError: if ``max_attempts`` is less than 1.
    """
    # Explicit guard: the original relied on a trailing `assert`, which is
    # stripped under `python -O` and gave an unhelpful AssertionError.
    if max_attempts < 1:
        raise ValueError("max_attempts must be >= 1")
    delay = base_delay
    for attempt in range(1, max_attempts + 1):
        try:
            return await fn()
        except retry_on as e:
            if attempt == max_attempts:
                # Out of attempts: bare `raise` keeps the original traceback.
                raise
            logger.warning(
                "transient error, retrying (%d/%d) in %.1fs: %s",
                attempt, max_attempts, delay, type(e).__name__,
            )
            await asyncio.sleep(delay)
            delay = min(delay * 2, max_delay)
    raise AssertionError("unreachable")  # keeps mypy's return analysis happy
+2 -1
View File
@@ -7,6 +7,7 @@ import uuid
from collections.abc import Callable
from contextlib import AbstractAsyncContextManager
from datetime import UTC, datetime
from typing import Any
from fastapi import FastAPI, HTTPException, Request
from fastapi.exceptions import RequestValidationError
@@ -28,7 +29,7 @@ def _error_envelope(
details: dict | None = None,
request_id: str | None = None,
) -> dict:
env = {
env: dict[str, Any] = {
"error": {
"type": type_,
"code": code,
+73
View File
@@ -0,0 +1,73 @@
from __future__ import annotations
import asyncio
import httpx
import pytest
from mcp_common.http import async_client, call_with_retry
def test_async_client_uses_retry_transport():
    """async_client() must install an AsyncHTTPTransport carrying the
    requested connection-level retry count."""
    c = async_client(retries=5)
    assert isinstance(c._transport, httpx.AsyncHTTPTransport)
    # internal _retries on transport
    # NOTE(review): reaches into httpx/httpcore private attributes
    # (_transport._pool._retries); may break on httpx upgrades — confirm
    # against the pinned httpx version.
    assert c._transport._pool._retries == 5
@pytest.mark.asyncio
async def test_call_with_retry_succeeds_first_try():
    """A function that succeeds immediately is invoked exactly once."""
    invocations = []

    async def succeed():
        invocations.append(1)
        return "ok"

    assert await call_with_retry(succeed) == "ok"
    assert len(invocations) == 1
@pytest.mark.asyncio
async def test_call_with_retry_recovers_after_transient(monkeypatch):
    """fn fails twice with a transient error, then succeeds on attempt 3."""

    async def no_sleep(_delay):
        # Drop-in stand-in for asyncio.sleep so the backoff never waits.
        # (A plain `async def` replaces the original dead-code expression
        # that referenced asyncio.coroutine, removed in Python 3.11.)
        return None

    monkeypatch.setattr(asyncio, "sleep", no_sleep)
    calls = 0

    async def fn():
        nonlocal calls
        calls += 1
        if calls < 3:
            raise httpx.ConnectError("boom")
        return "ok"

    result = await call_with_retry(fn, max_attempts=5, base_delay=0.0)
    assert result == "ok"
    assert calls == 3
async def _no_sleep(_):
return None
@pytest.mark.asyncio
async def test_call_with_retry_gives_up_after_max():
    """After max_attempts transient failures the last exception bubbles up."""
    attempts = []

    async def always_timeout():
        attempts.append(1)
        raise httpx.TimeoutException("slow")

    with pytest.raises(httpx.TimeoutException):
        await call_with_retry(always_timeout, max_attempts=3, base_delay=0.0)
    assert len(attempts) == 3
@pytest.mark.asyncio
async def test_call_with_retry_does_not_catch_unexpected():
    """Exceptions outside retry_on must propagate immediately, unretried."""

    async def boom():
        raise ValueError("not transient")

    with pytest.raises(ValueError):
        await call_with_retry(boom, max_attempts=5, base_delay=0.0)
@@ -8,6 +8,7 @@ import httpx
from mcp_common import indicators as ind
from mcp_common import microstructure as micro
from mcp_common import options as opt
from mcp_common.http import async_client
BASE_LIVE = "https://www.deribit.com/api/v2"
BASE_TESTNET = "https://test.deribit.com/api/v2"
@@ -43,7 +44,7 @@ class DeribitClient:
"client_id": self.client_id,
"client_secret": self.client_secret,
}
async with httpx.AsyncClient(timeout=15.0) as http:
async with async_client(timeout=15.0) as http:
resp = await http.get(url, params=params)
data = resp.json()
result = data["result"]
@@ -67,7 +68,7 @@ class DeribitClient:
if is_private and self._token:
headers["Authorization"] = f"Bearer {self._token}"
async with httpx.AsyncClient(timeout=15.0) as http:
async with async_client(timeout=15.0) as http:
resp = await http.get(url, params=request_params, headers=headers)
data = resp.json()
@@ -8,6 +8,7 @@ from typing import Any
import httpx
from mcp_common import indicators as ind
from mcp_common.http import async_client
BASE_LIVE = "https://api.hyperliquid.xyz"
BASE_TESTNET = "https://api.hyperliquid-testnet.xyz"
@@ -86,7 +87,7 @@ class HyperliquidClient:
async def _post(self, payload: dict[str, Any]) -> Any:
"""POST JSON to the /info endpoint."""
async with httpx.AsyncClient(timeout=15) as http:
async with async_client(timeout=15.0) as http:
resp = await http.post(f"{self.base_url}/info", json=payload)
resp.raise_for_status()
return resp.json()
@@ -295,7 +296,7 @@ class HyperliquidClient:
spot_price: float | None = None
spot_source = "coinbase"
try:
async with httpx.AsyncClient(timeout=8) as c:
async with async_client(timeout=8.0) as c:
resp = await c.get(f"https://api.coinbase.com/v2/prices/{asset}-USD/spot")
if resp.status_code == 200:
spot_price = float(resp.json().get("data", {}).get("amount"))
@@ -303,7 +304,7 @@ class HyperliquidClient:
spot_price = None
if spot_price is None:
try:
async with httpx.AsyncClient(timeout=8) as c:
async with async_client(timeout=8.0) as c:
resp = await c.get(
"https://api.kraken.com/0/public/Ticker", params={"pair": f"{asset}USD"}
)
+9 -8
View File
@@ -4,6 +4,7 @@ from datetime import UTC, datetime, timedelta
from typing import Any
import httpx
from mcp_common.http import async_client
FRED_BASE = "https://api.stlouisfed.org/fred/series/observations"
FINNHUB_CALENDAR = "https://finnhub.io/api/v1/calendar/economic"
@@ -79,7 +80,7 @@ async def fetch_asset_price(ticker: str) -> dict[str, Any]:
return {"ticker": ticker, "error": f"unknown ticker {ticker}"}
symbol, name = mapping
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
info = await _fetch_yahoo_meta(client, symbol, "10d")
meta = info.get("meta") or {}
closes = info.get("closes") or []
@@ -129,7 +130,7 @@ async def fetch_treasury_yields() -> dict[str, Any]:
("us30y", "^TYX"),
]
yields: dict[str, float | None] = {}
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
for key, sym in symbols:
info = await _fetch_yahoo_meta(client, sym, "5d")
meta = info.get("meta") or {}
@@ -217,7 +218,7 @@ async def fetch_breakeven_inflation(fred_api_key: str = "") -> dict[str, Any]:
"be_5y5y_forward": "T5YIFR",
}
out: dict[str, float | None] = {}
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
for name, series_id in series_map.items():
resp = await client.get(
FRED_BASE,
@@ -270,7 +271,7 @@ async def fetch_equity_futures() -> dict[str, Any]:
session = "after-hours"
out: dict[str, Any] = {}
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
for key, sym in tickers:
info = await _fetch_yahoo_meta(client, sym, "5d")
meta = info.get("meta") or {}
@@ -362,7 +363,7 @@ async def fetch_economic_indicators(
result: dict[str, Any] = {}
if not fred_api_key:
return {"indicators": result, "error": "No FRED API key configured"}
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
for name, series_id in series_map.items():
if indicators and name not in indicators:
continue
@@ -444,7 +445,7 @@ async def fetch_macro_calendar(
# Try Forex Factory free feed first
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get("https://nfs.faireconomy.media/ff_calendar_thisweek.json")
if resp.status_code == 200:
raw = resp.json()
@@ -503,7 +504,7 @@ async def fetch_macro_calendar(
try:
now = datetime.now(UTC)
end_default = now + timedelta(days=days_ahead)
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(
FINNHUB_CALENDAR,
params={
@@ -571,7 +572,7 @@ async def fetch_market_overview() -> dict[str, Any]:
if _MARKET_CACHE["data"] is not None and (now - _MARKET_CACHE["ts"]) < _MARKET_CACHE_TTL:
return _MARKET_CACHE["data"]
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
global_data: dict[str, Any] = {}
prices: dict[str, Any] = {}
try:
@@ -6,6 +6,7 @@ import xml.etree.ElementTree as ET
from typing import Any
import httpx
from mcp_common.http import async_client
CRYPTOPANIC_URL = "https://cryptopanic.com/api/v1/posts/"
ALTERNATIVE_ME_URL = "https://api.alternative.me/fng/"
@@ -18,7 +19,7 @@ MESSARI_NEWS_URL = "https://data.messari.io/api/v1/news"
async def _fetch_coindesk_headlines(limit: int = 20) -> list[dict[str, Any]]:
items: list[dict[str, Any]] = []
try:
async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
async with async_client(timeout=10.0, follow_redirects=True) as client:
resp = await client.get(COINDESK_RSS)
if resp.status_code != 200:
return items
@@ -54,7 +55,7 @@ async def _fetch_cryptocompare_news(limit: int = 20) -> list[dict[str, Any]]:
"""CER-017: CryptoCompare news free (no key needed)."""
items: list[dict[str, Any]] = []
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(CRYPTOCOMPARE_NEWS_URL, params={"lang": "EN"})
if resp.status_code != 200:
return items
@@ -82,7 +83,7 @@ async def _fetch_messari_news(limit: int = 20) -> list[dict[str, Any]]:
"""CER-017: Messari news free (no key needed for basic feed)."""
items: list[dict[str, Any]] = []
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(MESSARI_NEWS_URL)
if resp.status_code != 200:
return items
@@ -164,7 +165,7 @@ async def fetch_crypto_news(api_key: str = "", limit: int = 20) -> dict[str, Any
async def _fetch_cryptopanic_news(api_key: str, limit: int) -> list[dict[str, Any]]:
"""Cryptopanic as one of the sources. Failure → []."""
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(
CRYPTOPANIC_URL,
params={"auth_token": api_key, "public": "true"},
@@ -189,7 +190,7 @@ async def _fetch_cryptopanic_news(api_key: str, limit: int) -> list[dict[str, An
async def _fetch_lunarcrush(symbol: str, api_key: str) -> dict | None:
"""CER-P2-005: LunarCrush v4 social metrics. Ritorna None se fail."""
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(
LUNARCRUSH_COIN_URL.format(symbol=symbol.upper()),
headers={"Authorization": f"Bearer {api_key}"},
@@ -220,7 +221,7 @@ async def fetch_social_sentiment(symbol: str = "BTC") -> dict[str, Any]:
Altrimenti deriva proxy da fear_greed per popolare twitter/reddit sentiment
(marcato come derived=True così l'agent sa che è proxy).
"""
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
fng_resp = await client.get(ALTERNATIVE_ME_URL, params={"limit": 1})
fng_data = fng_resp.json()
fng_list = fng_data.get("data", [])
@@ -275,7 +276,7 @@ async def fetch_funding_rates(asset: str = "BTC") -> dict[str, Any]:
okx_inst = f"{asset}-USDT-SWAP"
rates: list[dict[str, Any]] = []
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
# Binance
try:
resp = await client.get(BINANCE_FUNDING_URL, params={"symbol": usdt_symbol})
@@ -341,7 +342,7 @@ async def fetch_cross_exchange_funding(assets: list[str] | None = None) -> dict[
assets = [a.upper() for a in (assets or ["BTC", "ETH", "SOL"])]
snapshot: dict[str, dict[str, Any]] = {}
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
for asset in assets:
rates: dict[str, float | None] = {
"binance": None,
@@ -530,7 +531,7 @@ async def fetch_cointegration_pairs(
interval = "1h"
limit = max(50, lookback_hours)
async with httpx.AsyncClient(timeout=15) as client:
async with async_client(timeout=15.0) as client:
for pair in pairs:
if len(pair) != 2:
continue
@@ -574,7 +575,7 @@ async def fetch_world_news() -> dict[str, Any]:
"""Fetch world financial news from free RSS feeds."""
articles: list[dict[str, Any]] = []
async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
async with async_client(timeout=10.0, follow_redirects=True) as client:
for source_name, url in WORLD_NEWS_FEEDS:
try:
resp = await client.get(url)
@@ -614,7 +615,7 @@ async def fetch_oi_history(asset: str = "BTC", period: str = "5m", limit: int =
limit = max(1, min(int(limit), 500))
points: list[dict[str, Any]] = []
try:
async with httpx.AsyncClient(timeout=10) as client:
async with async_client(timeout=10.0) as client:
resp = await client.get(
BINANCE_OI_HIST_URL,
params={"symbol": symbol, "period": period, "limit": limit},