feat(V2): migrazione macro completa (read-only, env ignored)
- exchanges/macro: cot.py + cot_contracts.py + fetchers.py copiati 1:1 con rewrite import mcp_common -> cerbero_mcp.common, mcp_macro -> cerbero_mcp.exchanges.macro - nuovo MacroClient stateless wrapper: trasporta solo fred_api_key/finnhub_api_key, niente HTTP session (i fetchers usano async_client ad-hoc) - tools.py: 11 tool (get_treasury_yields, get_yield_curve_slope, get_breakeven_inflation, get_economic_indicators, get_macro_calendar, get_market_overview, get_equity_futures, get_asset_price, get_cot_tff, get_cot_disaggregated, get_cot_extreme_positioning) — niente write, niente leverage_cap - routers/macro.py: prefix /mcp-macro, 11 route POST /tools/* - builder branch macro: stesse credenziali per testnet/mainnet (env ignorato); registry istanzia 2 entry, costo trascurabile (wrapper stateless) - test migrati: test_cot.py + test_fetchers.py (test_server_acl.py skippato V1-only) - nuovo test test_build_client_macro_no_env_distinction in test_exchanges_builder.py Suite: 224 passed. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -44,4 +44,14 @@ async def build_client(
|
|||||||
secret_key=settings.alpaca.secret_key.get_secret_value(),
|
secret_key=settings.alpaca.secret_key.get_secret_value(),
|
||||||
paper=(env == "testnet"),
|
paper=(env == "testnet"),
|
||||||
)
|
)
|
||||||
|
if exchange == "macro":
|
||||||
|
# Read-only data provider — env ignored. Il registry
|
||||||
|
# istanzia comunque 2 entry (testnet/mainnet); costo trascurabile
|
||||||
|
# (wrapper stateless senza HTTP session).
|
||||||
|
from cerbero_mcp.exchanges.macro.client import MacroClient
|
||||||
|
|
||||||
|
return MacroClient(
|
||||||
|
fred_api_key=settings.macro.fred_api_key.get_secret_value(),
|
||||||
|
finnhub_api_key=settings.macro.finnhub_api_key.get_secret_value(),
|
||||||
|
)
|
||||||
raise ValueError(f"unsupported exchange: {exchange}")
|
raise ValueError(f"unsupported exchange: {exchange}")
|
||||||
|
|||||||
@@ -0,0 +1,20 @@
|
|||||||
|
"""MacroClient: contenitore credenziali per data provider macro (FRED, Finnhub).
|
||||||
|
|
||||||
|
Macro è un read-only data provider: nessun testnet/mainnet, nessuna sessione
|
||||||
|
HTTP persistente — i `fetchers.py` aprono client httpx ad-hoc tramite
|
||||||
|
`cerbero_mcp.common.http.async_client`. Questo wrapper esiste solo per uniformità
|
||||||
|
con il pattern degli altri exchange (DeribitClient/BybitClient/...) e per essere
|
||||||
|
istanziato dal `ClientRegistry`.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
|
||||||
|
class MacroClient:
    """Credential holder for the FRED/Finnhub macro data providers.

    Stateless by design: no persistent HTTP session is kept here — the
    fetchers open ad-hoc httpx clients themselves. The class exists only
    so the macro provider fits the same client pattern as the exchanges
    and can be instantiated by the registry.
    """

    def __init__(self, *, fred_api_key: str = "", finnhub_api_key: str = "") -> None:
        # Plain attribute storage; nothing to validate or connect.
        self.fred_api_key = fred_api_key
        self.finnhub_api_key = finnhub_api_key

    async def aclose(self) -> None:  # pragma: no cover - no-op, no resources
        """No-op close hook, kept for interface symmetry with real clients."""
        return None
|
||||||
@@ -0,0 +1,91 @@
|
|||||||
|
"""Pure-logic helpers per COT report parsing e analytics.
|
||||||
|
|
||||||
|
Niente HTTP qui — orchestrazione fetch sta in fetchers.py. Tutto testabile
|
||||||
|
in isolamento.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
ExtremeSignal = Literal["extreme_short", "extreme_long", "neutral"]
|
||||||
|
|
||||||
|
|
||||||
|
def compute_percentile(value: float, history: list[float]) -> float | None:
    """Inclusive percentile (0-100) of ``value`` within ``history``.

    Returns None when ``history`` is empty; the result is clamped
    to the [0, 100] range.
    """
    if not history:
        return None
    count_below_or_eq = 0
    for sample in history:
        if sample <= value:
            count_below_or_eq += 1
    percentile = 100.0 * count_below_or_eq / len(history)
    return min(100.0, max(0.0, percentile))
|
||||||
|
|
||||||
|
|
||||||
|
def classify_extreme(percentile: float | None, threshold: float = 5.0) -> ExtremeSignal:
    """Label a positioning percentile as extreme short/long or neutral.

    With the default threshold of 5, percentiles <= 5 are flagged
    "extreme_short" and percentiles >= 95 "extreme_long"; everything
    else — including a missing percentile — is "neutral".
    """
    if percentile is not None:
        if percentile <= threshold:
            return "extreme_short"
        if percentile >= 100.0 - threshold:
            return "extreme_long"
    return "neutral"
|
||||||
|
|
||||||
|
|
||||||
|
def _to_int(v) -> int:
|
||||||
|
try:
|
||||||
|
return int(float(v))
|
||||||
|
except (TypeError, ValueError):
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def _date_only(s: str) -> str:
|
||||||
|
"""Estrae 'YYYY-MM-DD' da una data ISO con o senza timestamp."""
|
||||||
|
if not s:
|
||||||
|
return ""
|
||||||
|
return s.split("T", 1)[0]
|
||||||
|
|
||||||
|
|
||||||
|
def parse_tff_row(raw: dict) -> dict:
|
||||||
|
"""Mappa una row Socrata TFF al formato API output."""
|
||||||
|
dl = _to_int(raw.get("dealer_positions_long_all"))
|
||||||
|
ds = _to_int(raw.get("dealer_positions_short_all"))
|
||||||
|
al = _to_int(raw.get("asset_mgr_positions_long"))
|
||||||
|
as_ = _to_int(raw.get("asset_mgr_positions_short"))
|
||||||
|
ll = _to_int(raw.get("lev_money_positions_long"))
|
||||||
|
ls = _to_int(raw.get("lev_money_positions_short"))
|
||||||
|
ol = _to_int(raw.get("other_rept_positions_long"))
|
||||||
|
os_ = _to_int(raw.get("other_rept_positions_short"))
|
||||||
|
return {
|
||||||
|
"report_date": _date_only(raw.get("report_date_as_yyyy_mm_dd", "")),
|
||||||
|
"dealer_long": dl, "dealer_short": ds, "dealer_net": dl - ds,
|
||||||
|
"asset_mgr_long": al, "asset_mgr_short": as_, "asset_mgr_net": al - as_,
|
||||||
|
"lev_funds_long": ll, "lev_funds_short": ls, "lev_funds_net": ll - ls,
|
||||||
|
"other_long": ol, "other_short": os_, "other_net": ol - os_,
|
||||||
|
"open_interest": _to_int(raw.get("open_interest_all")),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def parse_disagg_row(raw: dict) -> dict:
|
||||||
|
"""Mappa una row Socrata Disaggregated F&O combined al formato API output."""
|
||||||
|
pl = _to_int(raw.get("prod_merc_positions_long_all"))
|
||||||
|
ps = _to_int(raw.get("prod_merc_positions_short_all"))
|
||||||
|
sl = _to_int(raw.get("swap_positions_long_all"))
|
||||||
|
ss = _to_int(raw.get("swap_positions_short_all"))
|
||||||
|
ml = _to_int(raw.get("m_money_positions_long_all"))
|
||||||
|
ms = _to_int(raw.get("m_money_positions_short_all"))
|
||||||
|
ol = _to_int(raw.get("other_rept_positions_long_all"))
|
||||||
|
os_ = _to_int(raw.get("other_rept_positions_short_all"))
|
||||||
|
return {
|
||||||
|
"report_date": _date_only(raw.get("report_date_as_yyyy_mm_dd", "")),
|
||||||
|
"producer_long": pl, "producer_short": ps, "producer_net": pl - ps,
|
||||||
|
"swap_long": sl, "swap_short": ss, "swap_net": sl - ss,
|
||||||
|
"managed_money_long": ml, "managed_money_short": ms, "managed_money_net": ml - ms,
|
||||||
|
"other_long": ol, "other_short": os_, "other_net": ol - os_,
|
||||||
|
"open_interest": _to_int(raw.get("open_interest_all")),
|
||||||
|
}
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
"""Costanti CFTC: ticker → contract_market_code per TFF e Disaggregated.
|
||||||
|
|
||||||
|
I codici CFTC (`cftc_contract_market_code`) sono pubblici e stabili nel tempo.
|
||||||
|
Riferimento: https://www.cftc.gov/MarketReports/CommitmentsofTraders/
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
# Socrata REST endpoint for CFTC public reporting, plus the two dataset ids
# queried by the fetchers (TFF = Traders in Financial Futures; Disaggregated).
CFTC_BASE_URL = "https://publicreporting.cftc.gov/resource"
TFF_DATASET_ID = "gpe5-46if"
DISAGG_DATASET_ID = "72hh-3qpy"

# TFF: equity/financial. Mapping ticker → cftc_contract_market_code.
SYMBOL_TO_CFTC_CODE_TFF: dict[str, str] = {
    "ES": "13874A",  # E-mini S&P 500
    "NQ": "209742",  # E-mini Nasdaq-100
    "RTY": "239742",  # E-mini Russell 2000
    "ZN": "043602",  # 10-Year T-Note
    "ZB": "020601",  # 30-Year T-Bond
    "6E": "099741",  # Euro FX
    "6J": "097741",  # Japanese Yen
    "DX": "098662",  # US Dollar Index
}

# Disaggregated: commodities.
SYMBOL_TO_CFTC_CODE_DISAGG: dict[str, str] = {
    "CL": "067651",  # Crude Oil WTI
    "GC": "088691",  # Gold
    "SI": "084691",  # Silver
    "HG": "085692",  # Copper
    "ZW": "001602",  # Wheat
    "ZC": "002602",  # Corn
    "ZS": "005602",  # Soybeans
}

# Convenience lists of every supported symbol, in mapping insertion order.
ALL_TFF_SYMBOLS: list[str] = list(SYMBOL_TO_CFTC_CODE_TFF.keys())
ALL_DISAGG_SYMBOLS: list[str] = list(SYMBOL_TO_CFTC_CODE_DISAGG.keys())
|
||||||
@@ -0,0 +1,771 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import UTC, datetime, timedelta
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
from cerbero_mcp.common.http import async_client
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.macro.cot import classify_extreme, compute_percentile, parse_disagg_row, parse_tff_row
|
||||||
|
from cerbero_mcp.exchanges.macro.cot_contracts import (
|
||||||
|
ALL_DISAGG_SYMBOLS,
|
||||||
|
ALL_TFF_SYMBOLS,
|
||||||
|
CFTC_BASE_URL,
|
||||||
|
DISAGG_DATASET_ID,
|
||||||
|
SYMBOL_TO_CFTC_CODE_DISAGG,
|
||||||
|
SYMBOL_TO_CFTC_CODE_TFF,
|
||||||
|
TFF_DATASET_ID,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Upstream endpoints. All are public; FRED and Finnhub take an API key as a
# query parameter on each request (no persistent session).
FRED_BASE = "https://api.stlouisfed.org/fred/series/observations"
FINNHUB_CALENDAR = "https://finnhub.io/api/v1/calendar/economic"
COINGECKO_GLOBAL = "https://api.coingecko.com/api/v3/global"
COINGECKO_SIMPLE = "https://api.coingecko.com/api/v3/simple/price"
DERIBIT_DVOL = "https://www.deribit.com/api/v2/public/get_volatility_index_data"
YAHOO_CHART = "https://query1.finance.yahoo.com/v8/finance/chart/{symbol}"

# Friendly ticker → (Yahoo Finance symbol, display name) used by
# fetch_asset_price. NOTE(review): "^UST2YR" does not look like a standard
# Yahoo symbol for the 2-year treasury — confirm it resolves; the fetchers
# degrade to a None price if it does not.
ASSET_TICKER_MAP: dict[str, tuple[str, str]] = {
    "WTI": ("CL=F", "WTI Crude Oil"),
    "BRENT": ("BZ=F", "Brent Crude Oil"),
    "GOLD": ("GC=F", "Gold Futures"),
    "SILVER": ("SI=F", "Silver Futures"),
    "COPPER": ("HG=F", "Copper Futures"),
    "NATGAS": ("NG=F", "Natural Gas"),
    "DXY": ("DX-Y.NYB", "US Dollar Index"),
    "SPX": ("^GSPC", "S&P 500"),
    "NDX": ("^NDX", "Nasdaq 100"),
    "DJI": ("^DJI", "Dow Jones"),
    "RUT": ("^RUT", "Russell 2000"),
    "VIX": ("^VIX", "CBOE Volatility Index"),
    "US5Y": ("^FVX", "US 5-Year Treasury"),
    "US10Y": ("^TNX", "US 10-Year Treasury"),
    "US30Y": ("^TYX", "US 30-Year Treasury"),
    "US2Y": ("^UST2YR", "US 2-Year Treasury"),
    "EURUSD": ("EURUSD=X", "EUR/USD"),
    "USDJPY": ("JPY=X", "USD/JPY"),
    "GBPUSD": ("GBPUSD=X", "GBP/USD"),
    "BTCUSD": ("BTC-USD", "Bitcoin/USD"),
    "ETHUSD": ("ETH-USD", "Ethereum/USD"),
    "ES": ("ES=F", "E-mini S&P 500 Futures"),
    "NQ": ("NQ=F", "E-mini Nasdaq 100 Futures"),
    "YM": ("YM=F", "E-mini Dow Futures"),
    "RTY": ("RTY=F", "E-mini Russell 2000 Futures"),
}

# Per-ticker response cache for fetch_asset_price:
#   key -> {"data": <response dict>, "ts": <time.monotonic() at store>}
_ASSET_CACHE: dict[str, dict] = {}
_ASSET_CACHE_TTL = 60.0  # seconds
|
||||||
|
|
||||||
|
|
||||||
|
async def _fetch_yahoo_meta(client: httpx.AsyncClient, symbol: str, range_: str = "10d") -> dict:
    """Fetch Yahoo chart metadata plus daily closes for ``symbol``.

    Returns {"meta": ..., "closes": [...]} on success, {} on any failure
    (non-200, malformed payload, transport error) — strictly best-effort.
    """
    try:
        response = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": range_},
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if response.status_code != 200:
            return {}
        chart_results = (response.json().get("chart") or {}).get("result") or []
        if not chart_results:
            return {}
        first = chart_results[0]
        quote = ((first.get("indicators") or {}).get("quote") or [{}])[0]
        # Drop the null placeholders Yahoo emits for non-trading days.
        daily_closes = [c for c in (quote.get("close") or []) if c is not None]
        return {"meta": first.get("meta") or {}, "closes": daily_closes}
    except Exception:
        return {}
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_asset_price(ticker: str) -> dict[str, Any]:
    """Spot price plus 24h/7d percentage change for a mapped macro ticker.

    Successful responses are cached for _ASSET_CACHE_TTL seconds; unknown
    tickers return an error payload and are never cached.
    """
    import time

    key = ticker.upper()
    now = time.monotonic()
    hit = _ASSET_CACHE.get(key)
    if hit and (now - hit["ts"]) < _ASSET_CACHE_TTL:
        return hit["data"]

    mapping = ASSET_TICKER_MAP.get(key)
    if not mapping:
        return {"ticker": ticker, "error": f"unknown ticker {ticker}"}
    yahoo_symbol, display_name = mapping

    async with async_client(timeout=10.0) as client:
        info = await _fetch_yahoo_meta(client, yahoo_symbol, "10d")
    meta = info.get("meta") or {}
    closes = info.get("closes") or []
    price = meta.get("regularMarketPrice")

    def _pct(current, base):
        # None/zero/missing base or a non-numeric value -> no change figure.
        if current is None or not base:
            return None
        try:
            return round((float(current) - float(base)) / float(base) * 100, 3)
        except Exception:
            return None

    change_24h_pct = _pct(price, meta.get("previousClose"))
    # 6 daily closes back approximates "7 calendar days ago" on trading days.
    change_7d_pct = _pct(price, closes[-6]) if len(closes) >= 6 else None

    out = {
        "ticker": key,
        "name": display_name,
        "price": float(price) if price is not None else None,
        "change_24h_pct": change_24h_pct,
        "change_7d_pct": change_7d_pct,
        "source": f"yfinance:{yahoo_symbol}",
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _ASSET_CACHE[key] = {"data": out, "ts": now}
    return out
|
||||||
|
|
||||||
|
|
||||||
|
_TREASURY_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
|
||||||
|
_TREASURY_TTL = 300.0
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_treasury_yields() -> dict[str, Any]:
    """US treasury yields (2/5/10/30Y) via Yahoo, plus 2s10s spread and shape.

    Results are cached at module level for _TREASURY_TTL seconds.
    """
    import time

    now = time.monotonic()
    if _TREASURY_CACHE["data"] and (now - _TREASURY_CACHE["ts"]) < _TREASURY_TTL:
        return _TREASURY_CACHE["data"]

    tenor_symbols = (
        ("us2y", "^UST2YR"),
        ("us5y", "^FVX"),
        ("us10y", "^TNX"),
        ("us30y", "^TYX"),
    )
    yields: dict[str, float | None] = {}
    async with async_client(timeout=10.0) as client:
        for tenor, yahoo_symbol in tenor_symbols:
            quote_meta = (await _fetch_yahoo_meta(client, yahoo_symbol, "5d")).get("meta") or {}
            last = quote_meta.get("regularMarketPrice")
            yields[tenor] = None if last is None else float(last)

    us2, us10 = yields.get("us2y"), yields.get("us10y")
    spread = round(us10 - us2, 3) if (us2 is not None and us10 is not None) else None

    # Shape heuristic on the 2s10s spread (percentage points).
    if spread is None:
        shape = "unknown"
    elif spread > 0.25:
        shape = "normal"
    elif spread < -0.1:
        shape = "inverted"
    else:
        shape = "flat"

    out = {
        "yields": yields,
        "spread_2y10y": spread,
        "yield_curve_shape": shape,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _TREASURY_CACHE["data"] = out
    _TREASURY_CACHE["ts"] = now
    return out
|
||||||
|
|
||||||
|
|
||||||
|
def yield_curve_metrics(yields: dict[str, float | None]) -> dict[str, Any]:
    """Slope and convexity metrics from a yields dict (us2y/us5y/us10y/us30y).

    Butterfly (2s10s30s) is 2*us10y - us2y - us30y; positive means a concave
    curve. Any missing tenor propagates as None into the metrics that need it.
    """
    y2, y5, y10, y30 = (yields.get(k) for k in ("us2y", "us5y", "us10y", "us30y"))

    slope_2y10y = None if (y2 is None or y10 is None) else y10 - y2
    slope_5y30y = None if (y5 is None or y30 is None) else y30 - y5
    if y2 is None or y10 is None or y30 is None:
        butterfly = None
    else:
        butterfly = 2 * y10 - y2 - y30

    # Regime bands on the 2s10s slope, widest to narrowest.
    if slope_2y10y is None:
        regime = "unknown"
    elif slope_2y10y >= 0.5:
        regime = "steep"
    elif slope_2y10y > 0.1:
        regime = "normal"
    elif slope_2y10y > -0.1:
        regime = "flat"
    else:
        regime = "inverted"

    return {
        "slope_2y10y": None if slope_2y10y is None else round(slope_2y10y, 3),
        "slope_5y30y": None if slope_5y30y is None else round(slope_5y30y, 3),
        "butterfly_2_10_30": None if butterfly is None else round(butterfly, 3),
        "regime": regime,
    }
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_yield_curve_slope() -> dict[str, Any]:
    """Current treasury curve plus slope/convexity metrics (yield_curve_metrics)."""
    snapshot = await fetch_treasury_yields()
    return {
        "yields": snapshot.get("yields"),
        **yield_curve_metrics(snapshot.get("yields") or {}),
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_breakeven_inflation(fred_api_key: str = "") -> dict[str, Any]:
    """Market-implied inflation expectations via FRED breakeven series.

    Series fetched (latest observation each):
    - T5YIE: 5-year breakeven inflation rate
    - T10YIE: 10-year breakeven inflation rate
    - T5YIFR: 5-year, 5-year forward inflation expectation rate
    """
    if not fred_api_key:
        return {"error": "No FRED API key configured", "breakevens": {}}

    series_map = {
        "be_5y": "T5YIE",
        "be_10y": "T10YIE",
        "be_5y5y_forward": "T5YIFR",
    }
    breakevens: dict[str, float | None] = {}
    async with async_client(timeout=10.0) as client:
        for label, series_id in series_map.items():
            response = await client.get(
                FRED_BASE,
                params={
                    "series_id": series_id,
                    "api_key": fred_api_key,
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": 1,
                },
            )
            observations = response.json().get("observations", [])
            try:
                # FRED encodes missing observations as ".".
                raw_value = observations[0]["value"]
                breakevens[label] = float(raw_value) if raw_value != "." else None
            except (ValueError, IndexError, KeyError):
                breakevens[label] = None

    be10 = breakevens.get("be_10y")
    if be10 is None:
        interpretation = "unknown"
    elif be10 > 3.0:
        interpretation = "high_inflation_expected"
    elif be10 < 1.5:
        interpretation = "low_inflation_expected"
    else:
        interpretation = "anchored"

    return {
        "breakevens": breakevens,
        "interpretation": interpretation,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_equity_futures() -> dict[str, Any]:
    """Fetch ES/NQ/YM/RTY futures quotes with US cash-session detection.

    Session is classified from UTC time against the regular US cash session
    window used by this module (13:00-20:00 UTC on weekdays).
    ``next_open_utc`` is the next weekday 13:30 UTC cash open.
    """
    tickers = [("es", "ES=F"), ("nq", "NQ=F"), ("ym", "YM=F"), ("rty", "RTY=F")]
    now = datetime.now(UTC)
    weekday = now.weekday()  # 0=Mon
    hour_utc = now.hour
    cash_open = (weekday < 5) and (13 <= hour_utc < 20)
    if cash_open:
        session = "regular"
    elif weekday >= 5:
        session = "weekend"
    elif hour_utc < 13:
        session = "pre-market"
    else:
        session = "after-hours"

    out: dict[str, Any] = {}
    async with async_client(timeout=10.0) as client:
        for key, sym in tickers:
            info = await _fetch_yahoo_meta(client, sym, "5d")
            meta = info.get("meta") or {}
            price = meta.get("regularMarketPrice")
            prev = meta.get("previousClose") or meta.get("chartPreviousClose")
            change_pct = None
            if price is not None and prev:
                try:
                    change_pct = round((float(price) - float(prev)) / float(prev) * 100, 3)
                except Exception:
                    change_pct = None
            out[key] = {
                "price": float(price) if price is not None else None,
                "change_pct": change_pct,
                "session": session,
            }

    # Next cash open: today 13:30 UTC if still ahead, otherwise advance one day
    # at a time skipping Saturday/Sunday. Bug fix: the previous arithmetic
    # (`days_ahead = 1` for any weekday) reported a Saturday "next open" when
    # called on Friday at or after 13:00 UTC.
    candidate = now.replace(hour=13, minute=30, second=0, microsecond=0)
    if candidate <= now:
        candidate += timedelta(days=1)
    while candidate.weekday() >= 5:  # 5=Sat, 6=Sun
        candidate += timedelta(days=1)
    next_open = candidate.isoformat()

    return {
        "futures": out,
        "session_status": {
            "cash_open": cash_open,
            "session": session,
            "next_open_utc": next_open,
        },
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
|
||||||
|
|
||||||
|
_MARKET_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
|
||||||
|
_MARKET_CACHE_TTL = 120.0
|
||||||
|
|
||||||
|
|
||||||
|
async def _fetch_yahoo_price(client: httpx.AsyncClient, symbol: str) -> float | None:
    """Last regular-market price for ``symbol`` from Yahoo chart; None on failure."""
    try:
        response = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": "5d"},
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if response.status_code != 200:
            return None
        chart_results = (response.json().get("chart") or {}).get("result") or []
        if not chart_results:
            return None
        last = (chart_results[0].get("meta") or {}).get("regularMarketPrice")
        return None if last is None else float(last)
    except Exception:  # best-effort: any transport/shape error -> None
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def _fetch_dvol_latest(client: httpx.AsyncClient, currency: str) -> float | None:
    """Latest daily DVOL value for ``currency`` from Deribit's public index.

    Looks back one week of 1D candles and returns the last candle's index-4
    field (close, per the Deribit volatility-index response layout — confirm
    against the API docs). None on any failure.
    """
    end_ms = int(datetime.now(UTC).timestamp() * 1000)
    start_ms = end_ms - 7 * 24 * 3600 * 1000
    try:
        response = await client.get(
            DERIBIT_DVOL,
            params={
                "currency": currency,
                "start_timestamp": start_ms,
                "end_timestamp": end_ms,
                "resolution": "1D",
            },
        )
        candles = (response.json().get("result") or {}).get("data") or []
        if candles:
            return float(candles[-1][4])
        return None
    except Exception:
        return None
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_economic_indicators(
    fred_api_key: str = "",
    indicators: list[str] | None = None,
) -> dict[str, Any]:
    """Latest FRED readings for key US indicators.

    Series: fed_rate (FEDFUNDS), cpi (CPIAUCSL), unemployment (UNRATE),
    us10y_yield (DGS10). ``indicators`` optionally restricts which series
    are fetched. Missing FRED observations — encoded as "." by the API —
    yield None instead of raising, consistent with fetch_breakeven_inflation.
    """
    series_map = {
        "fed_rate": "FEDFUNDS",
        "cpi": "CPIAUCSL",
        "unemployment": "UNRATE",
        "us10y_yield": "DGS10",
    }
    result: dict[str, Any] = {}
    if not fred_api_key:
        return {"indicators": result, "error": "No FRED API key configured"}
    async with async_client(timeout=10.0) as client:
        for name, series_id in series_map.items():
            if indicators and name not in indicators:
                continue
            resp = await client.get(
                FRED_BASE,
                params={
                    "series_id": series_id,
                    "api_key": fred_api_key,
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": 1,
                },
            )
            obs = resp.json().get("observations", [])
            # Bug fix: FRED reports missing values as "." — float(".") used to
            # raise an uncaught ValueError here.
            try:
                result[name] = float(obs[0]["value"]) if obs and obs[0]["value"] != "." else None
            except (ValueError, TypeError, IndexError, KeyError):
                result[name] = None
    result["updated_at"] = datetime.now(UTC).isoformat()
    return result
|
||||||
|
|
||||||
|
|
||||||
|
# Currency code → (country_code, display name). Used to normalize calendar
# feeds that key events by currency (Forex Factory) or country code (Finnhub).
CURRENCY_TO_COUNTRY = {
    "USD": ("US", "United States"),
    "EUR": ("EU", "Euro Area"),
    "JPY": ("JP", "Japan"),
    "GBP": ("UK", "United Kingdom"),
    "CAD": ("CA", "Canada"),
    "AUD": ("AU", "Australia"),
    "NZD": ("NZ", "New Zealand"),
    "CHF": ("CH", "Switzerland"),
    "CNY": ("CN", "China"),
}
|
||||||
|
|
||||||
|
_HIGH_IMPACT_EVENTS = (
|
||||||
|
"fomc", "fed", "cpi", "nfp", "non-farm", "nonfarm", "ppi",
|
||||||
|
"ecb", "boj", "boe", "gdp", "unemployment rate",
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def _market_impact_historical(name: str) -> str:
|
||||||
|
n = (name or "").lower()
|
||||||
|
for kw in _HIGH_IMPACT_EVENTS:
|
||||||
|
if kw in n:
|
||||||
|
return "high_vol_spike"
|
||||||
|
return "normal"
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_macro_calendar(
    finnhub_api_key: str = "",
    days_ahead: int = 7,
    country_filter: list[str] | None = None,
    importance_min: str | None = None,
    start: str | None = None,
    end: str | None = None,
) -> dict[str, Any]:
    """Fetch the economic calendar with country/importance/date-range filters.

    Source strategy: the free Forex Factory weekly feed is tried first; if it
    yields no events and a Finnhub key is available, Finnhub's economic
    calendar is used as fallback.

    Args:
        finnhub_api_key: token for the Finnhub fallback; empty disables it.
        days_ahead: default window length for the Finnhub query when ``end``
            is not given.
        country_filter: country codes (e.g. ["US", "EU"]) to keep; None keeps all.
        importance_min: "low"/"medium"/"high" — drop events below this level.
        start / end: ISO or YYYY-MM-DD bounds, interpreted as UTC.

    Returns:
        {"events": [...]} — possibly with "error" (Finnhub error payload) or
        "note" (no source produced events) instead of data.
    """
    events: list[dict[str, Any]] = []

    importance_order = {"low": 0, "medium": 1, "high": 2}
    # Unknown importance_min strings fall back to level 0 (keep everything).
    min_level = importance_order.get(
        (importance_min or "").lower(), 0
    ) if importance_min else 0

    # Parse optional bounds: full ISO first, plain YYYY-MM-DD as fallback.
    start_dt: datetime | None = None
    end_dt: datetime | None = None
    if start:
        try:
            start_dt = datetime.fromisoformat(start).replace(tzinfo=UTC)
        except ValueError:
            start_dt = datetime.strptime(start, "%Y-%m-%d").replace(tzinfo=UTC)
    if end:
        try:
            end_dt = datetime.fromisoformat(end).replace(tzinfo=UTC)
        except ValueError:
            end_dt = datetime.strptime(end, "%Y-%m-%d").replace(tzinfo=UTC)

    country_filter_set = (
        {c.upper() for c in country_filter} if country_filter else None
    )

    # Source 1: Forex Factory free feed (no key required, this week only).
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get("https://nfs.faireconomy.media/ff_calendar_thisweek.json")
            if resp.status_code == 200:
                raw = resp.json()
                now = datetime.now(UTC)
                for e in raw:
                    date_str = e.get("date", "")
                    event_dt: datetime | None = None
                    try:
                        event_dt = datetime.fromisoformat(date_str.replace("Z", "+00:00"))
                        # Past events are dropped; undated events are kept.
                        if event_dt < now:
                            continue
                    except (ValueError, TypeError):
                        pass

                    # FF keys events by currency; map to country code/name.
                    currency = (e.get("country", "") or "").upper()
                    country_code, country_name = CURRENCY_TO_COUNTRY.get(
                        currency, (currency or "", e.get("country", "") or "")
                    )

                    if country_filter_set and country_code not in country_filter_set:
                        continue

                    impact = (e.get("impact", "") or "").lower()
                    importance = (
                        "high" if impact == "high" else "medium" if impact == "medium" else "low"
                    )
                    if importance_order[importance] < min_level:
                        continue

                    # Date-range filters only apply when the event date parsed.
                    if start_dt and event_dt and event_dt < start_dt:
                        continue
                    if end_dt and event_dt and event_dt > end_dt:
                        continue

                    name = e.get("title", "")
                    events.append(
                        {
                            "date": date_str,
                            "datetime_utc": event_dt.isoformat() if event_dt else date_str,
                            "name": name,
                            "event": name,
                            "country": country_name,
                            "country_code": country_code,
                            "importance": importance,
                            "forecast": e.get("forecast", ""),
                            "previous": e.get("previous", ""),
                            "actual": e.get("actual"),
                            "market_impact_historical": _market_impact_historical(name),
                        }
                    )
    except Exception:
        # Best-effort source; any failure falls through to Finnhub.
        pass

    # Source 2: Finnhub, only when FF produced nothing and a key exists.
    if not events and finnhub_api_key:
        try:
            now = datetime.now(UTC)
            end_default = now + timedelta(days=days_ahead)
            async with async_client(timeout=10.0) as client:
                resp = await client.get(
                    FINNHUB_CALENDAR,
                    params={
                        "from": (start_dt or now).strftime("%Y-%m-%d"),
                        "to": (end_dt or end_default).strftime("%Y-%m-%d"),
                        "token": finnhub_api_key,
                    },
                )
                data = resp.json()
                if isinstance(data, dict) and "error" in data:
                    return {"events": [], "error": data["error"]}
                raw = data if isinstance(data, list) else data.get("economicCalendar", [])
                for e in raw:
                    # Finnhub importance may be an int level or a string label.
                    importance_raw = (
                        e.get("importance")
                        or e.get("impact")
                        or "medium"
                    )
                    if isinstance(importance_raw, int):
                        importance = (
                            "high" if importance_raw >= 3 else
                            "medium" if importance_raw >= 2 else
                            "low"
                        )
                    else:
                        importance = str(importance_raw).lower()
                        if importance not in ("low", "medium", "high"):
                            importance = "medium"
                    if importance_order[importance] < min_level:
                        continue
                    country_code = (e.get("country", "") or "").upper()
                    country_name = CURRENCY_TO_COUNTRY.get(
                        country_code, (country_code, country_code)
                    )[1]
                    if country_filter_set and country_code not in country_filter_set:
                        continue
                    name = e.get("event", "")
                    date_str = e.get("date", e.get("time", ""))
                    events.append({
                        "date": date_str,
                        "datetime_utc": date_str,
                        "name": name,
                        "event": name,
                        "country": country_name,
                        "country_code": country_code,
                        "importance": importance,
                        "forecast": e.get("forecast", ""),
                        "previous": e.get("previous", e.get("prev", "")),
                        "actual": e.get("actual"),
                        "market_impact_historical": _market_impact_historical(name),
                    })
        except Exception:
            pass

    if not events:
        return {"events": [], "note": "No calendar source available"}

    return {"events": events}
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_market_overview() -> dict[str, Any]:
    """Cross-asset snapshot: crypto caps/prices, S&P 500, gold, VIX, Deribit DVOL.

    Each source is fetched best-effort; a failing source leaves its fields
    None. The result is cached at module level for _MARKET_CACHE_TTL seconds.
    """
    import time

    now = time.monotonic()
    if _MARKET_CACHE["data"] is not None and (now - _MARKET_CACHE["ts"]) < _MARKET_CACHE_TTL:
        return _MARKET_CACHE["data"]

    async with async_client(timeout=10.0) as client:
        try:
            global_data = (await client.get(COINGECKO_GLOBAL)).json().get("data", {}) or {}
        except Exception:
            global_data = {}
        try:
            price_resp = await client.get(
                COINGECKO_SIMPLE,
                params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
            )
            prices = price_resp.json() or {}
        except Exception:
            prices = {}
        dvol_btc = await _fetch_dvol_latest(client, "BTC")
        dvol_eth = await _fetch_dvol_latest(client, "ETH")
        sp500 = await _fetch_yahoo_price(client, "^GSPC")
        gold = await _fetch_yahoo_price(client, "GC=F")
        vix = await _fetch_yahoo_price(client, "^VIX")

    out = {
        "btc_dominance": global_data.get("market_cap_percentage", {}).get("btc"),
        "total_market_cap": global_data.get("total_market_cap", {}).get("usd"),
        "btc_price": prices.get("bitcoin", {}).get("usd"),
        "eth_price": prices.get("ethereum", {}).get("usd"),
        "sp500": sp500,
        "gold": gold,
        "vix": vix,
        "dvol_btc": dvol_btc,
        "dvol_eth": dvol_eth,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _MARKET_CACHE["data"] = out
    _MARKET_CACHE["ts"] = now
    return out
|
||||||
|
|
||||||
|
|
||||||
|
# In-process cache for CFTC COT responses, keyed by (symbol, report_type, lookback_weeks).
_COT_TTL = 3600.0  # 1h — COT data only changes weekly, so 1h staleness is harmless
_COT_CACHE: dict[tuple[str, str, int], dict[str, Any]] = {}  # key -> cached response payload
_COT_CACHE_TS: dict[tuple[str, str, int], float] = {}  # key -> time.monotonic() at store time
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_cot_tff(symbol: str, lookback_weeks: int = 52) -> dict[str, Any]:
    """Fetch the COT Traders-in-Financial-Futures report for a financial symbol.

    Rows come back sorted ascending by report date. Responses are cached for
    ``_COT_TTL`` seconds; an unknown symbol or an upstream non-200 produces an
    error payload instead of raising.
    """
    import time

    symbol = symbol.upper()
    if symbol not in SYMBOL_TO_CFTC_CODE_TFF:
        return {"error": "unknown_symbol", "available": ALL_TFF_SYMBOLS}

    cache_key = (symbol, "tff", lookback_weeks)
    fetched_at = time.monotonic()
    cached = _COT_CACHE.get(cache_key)
    if cached is not None and (fetched_at - _COT_CACHE_TS[cache_key]) < _COT_TTL:
        return cached

    market_code = SYMBOL_TO_CFTC_CODE_TFF[symbol]
    dataset_url = f"{CFTC_BASE_URL}/{TFF_DATASET_ID}.json"
    query = {
        "cftc_contract_market_code": market_code,
        "$order": "report_date_as_yyyy_mm_dd DESC",
        "$limit": str(lookback_weeks),
    }
    async with async_client(timeout=10.0) as client:
        response = await client.get(dataset_url, params=query)
        if response.status_code != 200:
            return {"symbol": symbol, "report_type": "tff", "rows": [], "error": "cftc_unavailable"}
        payload = response.json() or []

    # Socrata returns newest-first; re-sort ascending so callers get a time series.
    rows = sorted((parse_tff_row(item) for item in payload), key=lambda row: row["report_date"])
    result = {
        "symbol": symbol,
        "report_type": "tff",
        "rows": rows,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _COT_CACHE[cache_key] = result
    _COT_CACHE_TS[cache_key] = fetched_at
    return result
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_cot_disaggregated(symbol: str, lookback_weeks: int = 52) -> dict[str, Any]:
    """Fetch the COT Disaggregated report for a commodity symbol.

    Rows come back sorted ascending by report date. Responses are cached for
    ``_COT_TTL`` seconds; an unknown symbol or an upstream non-200 produces an
    error payload instead of raising.
    """
    import time

    symbol = symbol.upper()
    if symbol not in SYMBOL_TO_CFTC_CODE_DISAGG:
        return {"error": "unknown_symbol", "available": ALL_DISAGG_SYMBOLS}

    cache_key = (symbol, "disaggregated", lookback_weeks)
    fetched_at = time.monotonic()
    cached = _COT_CACHE.get(cache_key)
    if cached is not None and (fetched_at - _COT_CACHE_TS[cache_key]) < _COT_TTL:
        return cached

    market_code = SYMBOL_TO_CFTC_CODE_DISAGG[symbol]
    dataset_url = f"{CFTC_BASE_URL}/{DISAGG_DATASET_ID}.json"
    query = {
        "cftc_contract_market_code": market_code,
        "$order": "report_date_as_yyyy_mm_dd DESC",
        "$limit": str(lookback_weeks),
    }
    async with async_client(timeout=10.0) as client:
        response = await client.get(dataset_url, params=query)
        if response.status_code != 200:
            return {"symbol": symbol, "report_type": "disaggregated", "rows": [], "error": "cftc_unavailable"}
        payload = response.json() or []

    # Socrata returns newest-first; re-sort ascending so callers get a time series.
    rows = sorted((parse_disagg_row(item) for item in payload), key=lambda row: row["report_date"])
    result = {
        "symbol": symbol,
        "report_type": "disaggregated",
        "rows": rows,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _COT_CACHE[cache_key] = result
    _COT_CACHE_TS[cache_key] = fetched_at
    return result
|
||||||
|
|
||||||
|
|
||||||
|
async def fetch_cot_extreme_positioning(lookback_weeks: int = 156) -> dict[str, Any]:
    """Scan the watchlist for extreme positioning (percentile <=5 or >=95).

    TFF symbols -> key_role = lev_funds (series: lev_funds_net).
    Disaggregated symbols -> key_role = managed_money (series: managed_money_net).

    Returns a dict with the lookback window, the list of per-symbol extreme
    entries, and a UTC data timestamp. Individual symbol failures are skipped
    (best-effort scan), never raised.
    """
    import asyncio

    tff_tasks = [fetch_cot_tff(s, lookback_weeks) for s in ALL_TFF_SYMBOLS]
    disagg_tasks = [fetch_cot_disaggregated(s, lookback_weeks) for s in ALL_DISAGG_SYMBOLS]
    tff_results, disagg_results = await asyncio.gather(
        asyncio.gather(*tff_tasks, return_exceptions=True),
        asyncio.gather(*disagg_tasks, return_exceptions=True),
    )

    # The two report families share the exact same scan logic; only the
    # report_type/key_role labels and the net-position field differ.
    extremes: list[dict[str, Any]] = []
    _append_cot_extremes(extremes, tff_results, "tff", "lev_funds", "lev_funds_net")
    _append_cot_extremes(
        extremes, disagg_results, "disaggregated", "managed_money", "managed_money_net"
    )

    return {
        "lookback_weeks": lookback_weeks,
        "extremes": extremes,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }


def _append_cot_extremes(
    extremes: list[dict[str, Any]],
    results: list[Any],
    report_type: str,
    key_role: str,
    net_field: str,
) -> None:
    """Append percentile entries for one batch of COT fetch results.

    Skips failed fetches (exceptions from gather) and symbols with fewer than
    4 rows of history (too little data for a meaningful percentile).
    """
    for res in results:
        if isinstance(res, BaseException) or not isinstance(res, dict):
            continue
        rows = res.get("rows") or []
        if len(rows) < 4:
            continue
        series = [r[net_field] for r in rows]
        current = series[-1]
        # Percentile of the latest net position against its own history.
        pct = compute_percentile(current, series[:-1])
        extremes.append({
            "symbol": res["symbol"],
            "report_type": report_type,
            "key_role": key_role,
            "current_net": current,
            "percentile": pct,
            "signal": classify_extreme(pct),
            "report_date": rows[-1]["report_date"],
        })
|
||||||
@@ -0,0 +1,154 @@
|
|||||||
|
"""Tool macro V2: pydantic schemas + async functions.
|
||||||
|
|
||||||
|
Ogni funzione prende (client: MacroClient, params: <Req>) e ritorna un dict.
|
||||||
|
Pure logica, no FastAPI dependency, no ACL. Macro è read-only data provider:
|
||||||
|
nessun write tool, nessun leverage_cap.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.macro.client import MacroClient
|
||||||
|
from cerbero_mcp.exchanges.macro.fetchers import (
|
||||||
|
fetch_asset_price,
|
||||||
|
fetch_breakeven_inflation,
|
||||||
|
fetch_cot_disaggregated,
|
||||||
|
fetch_cot_extreme_positioning,
|
||||||
|
fetch_cot_tff,
|
||||||
|
fetch_economic_indicators,
|
||||||
|
fetch_equity_futures,
|
||||||
|
fetch_macro_calendar,
|
||||||
|
fetch_market_overview,
|
||||||
|
fetch_treasury_yields,
|
||||||
|
fetch_yield_curve_slope,
|
||||||
|
)
|
||||||
|
|
||||||
|
# === Schemas ===
|
||||||
|
|
||||||
|
|
||||||
|
class GetEconomicIndicatorsReq(BaseModel):
    """Request for get_economic_indicators."""

    # Subset of indicator names to fetch; None = all available indicators.
    indicators: list[str] | None = None
|
||||||
|
|
||||||
|
|
||||||
|
class GetMacroCalendarReq(BaseModel):
    """Request for get_macro_calendar (upcoming economic events)."""

    days: int = 7  # look-ahead window in days
    country_filter: list[str] | None = None  # country/currency codes; None = all
    importance_min: str | None = None  # minimum importance level (presumably "low"/"medium"/"high" — confirm against fetcher)
    start: str | None = None  # optional explicit range start (date string)
    end: str | None = None  # optional explicit range end (date string)
|
||||||
|
|
||||||
|
|
||||||
|
class GetMarketOverviewReq(BaseModel):
    """Empty request body — get_market_overview takes no parameters."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GetAssetPriceReq(BaseModel):
    """Request for get_asset_price."""

    # Ticker symbol of the asset to quote.
    ticker: str
|
||||||
|
|
||||||
|
|
||||||
|
class GetTreasuryYieldsReq(BaseModel):
    """Empty request body — get_treasury_yields takes no parameters."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GetEquityFuturesReq(BaseModel):
    """Empty request body — get_equity_futures takes no parameters."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GetYieldCurveSlopeReq(BaseModel):
    """Empty request body — get_yield_curve_slope takes no parameters."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GetBreakevenInflationReq(BaseModel):
    """Empty request body — get_breakeven_inflation takes no parameters."""

    pass
|
||||||
|
|
||||||
|
|
||||||
|
class GetCotTffReq(BaseModel):
    """Request for get_cot_tff: one financial-futures symbol plus lookback window."""

    symbol: str  # watchlist symbol, resolved to a CFTC contract-market code
    lookback_weeks: int = Field(default=52, ge=4, le=520)  # COT is weekly: 1y default, 4w-10y bounds
|
||||||
|
|
||||||
|
|
||||||
|
class GetCotDisaggregatedReq(BaseModel):
    """Request for get_cot_disaggregated: one commodity symbol plus lookback window."""

    symbol: str  # watchlist symbol, resolved to a CFTC contract-market code
    lookback_weeks: int = Field(default=52, ge=4, le=520)  # COT is weekly: 1y default, 4w-10y bounds
|
||||||
|
|
||||||
|
|
||||||
|
class GetCotExtremeReq(BaseModel):
    """Request for get_cot_extreme_positioning (watchlist-wide percentile scan)."""

    lookback_weeks: int = Field(default=156, ge=4, le=520)  # 3y default history for percentiles
|
||||||
|
|
||||||
|
|
||||||
|
# === Tool functions ===
|
||||||
|
|
||||||
|
|
||||||
|
async def get_economic_indicators(
    client: MacroClient, params: GetEconomicIndicatorsReq
) -> dict[str, Any]:
    """Thin wrapper: fetch FRED indicators using the client's API key."""
    return await fetch_economic_indicators(
        fred_api_key=client.fred_api_key, indicators=params.indicators
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def get_macro_calendar(
    client: MacroClient, params: GetMacroCalendarReq
) -> dict[str, Any]:
    """Thin wrapper: fetch the economic-events calendar (Finnhub key as fallback source)."""
    return await fetch_macro_calendar(
        finnhub_api_key=client.finnhub_api_key,
        days_ahead=params.days,  # request field is `days`; fetcher parameter is `days_ahead`
        country_filter=params.country_filter,
        importance_min=params.importance_min,
        start=params.start,
        end=params.end,
    )
|
||||||
|
|
||||||
|
|
||||||
|
async def get_market_overview(
    client: MacroClient, params: GetMarketOverviewReq
) -> dict[str, Any]:
    """Thin wrapper: cross-asset snapshot; needs no client credentials."""
    return await fetch_market_overview()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_asset_price(
    client: MacroClient, params: GetAssetPriceReq
) -> dict[str, Any]:
    """Thin wrapper: quote a single ticker; needs no client credentials."""
    return await fetch_asset_price(params.ticker)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_treasury_yields(
    client: MacroClient, params: GetTreasuryYieldsReq
) -> dict[str, Any]:
    """Thin wrapper: US Treasury yields; needs no client credentials."""
    return await fetch_treasury_yields()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_equity_futures(
    client: MacroClient, params: GetEquityFuturesReq
) -> dict[str, Any]:
    """Thin wrapper: equity index futures quotes; needs no client credentials."""
    return await fetch_equity_futures()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_yield_curve_slope(
    client: MacroClient, params: GetYieldCurveSlopeReq
) -> dict[str, Any]:
    """Thin wrapper: yield-curve slope metrics; needs no client credentials."""
    return await fetch_yield_curve_slope()
|
||||||
|
|
||||||
|
|
||||||
|
async def get_breakeven_inflation(
    client: MacroClient, params: GetBreakevenInflationReq
) -> dict[str, Any]:
    """Thin wrapper: FRED breakeven-inflation series using the client's API key."""
    return await fetch_breakeven_inflation(fred_api_key=client.fred_api_key)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_cot_tff(client: MacroClient, params: GetCotTffReq) -> dict[str, Any]:
    """Thin wrapper: COT TFF report for one symbol (public CFTC data, no key)."""
    return await fetch_cot_tff(params.symbol, params.lookback_weeks)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_cot_disaggregated(
    client: MacroClient, params: GetCotDisaggregatedReq
) -> dict[str, Any]:
    """Thin wrapper: COT Disaggregated report for one symbol (public CFTC data, no key)."""
    return await fetch_cot_disaggregated(params.symbol, params.lookback_weeks)
|
||||||
|
|
||||||
|
|
||||||
|
async def get_cot_extreme_positioning(
    client: MacroClient, params: GetCotExtremeReq
) -> dict[str, Any]:
    """Thin wrapper: watchlist-wide COT extreme-positioning scan (public CFTC data)."""
    return await fetch_cot_extreme_positioning(params.lookback_weeks)
|
||||||
@@ -0,0 +1,111 @@
|
|||||||
|
"""Router /mcp-macro/* — read-only data provider.
|
||||||
|
|
||||||
|
Macro non distingue testnet/mainnet (FRED/Finnhub sono endpoint pubblici unici),
|
||||||
|
ma manteniamo la signature `Environment` per uniformità con gli altri router.
|
||||||
|
Tutti i tool sono READ — niente write, niente leverage_cap.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
|
||||||
|
from cerbero_mcp.client_registry import ClientRegistry
|
||||||
|
from cerbero_mcp.exchanges.macro import tools as t
|
||||||
|
from cerbero_mcp.exchanges.macro.client import MacroClient
|
||||||
|
|
||||||
|
Environment = Literal["testnet", "mainnet"]
|
||||||
|
|
||||||
|
|
||||||
|
def get_environment(request: Request) -> Environment:
    """Return the per-request environment ("testnet"/"mainnet").

    Reads request.state.environment — presumably populated by upstream
    middleware; confirm against the app setup.
    """
    return request.state.environment
|
||||||
|
|
||||||
|
|
||||||
|
async def get_macro_client(
    request: Request, env: Environment = Depends(get_environment)
) -> MacroClient:
    """FastAPI dependency: resolve the MacroClient for this request's environment.

    Looks up the shared ClientRegistry stored on app.state; macro itself does
    not distinguish testnet/mainnet, but the registry is keyed by env.
    """
    registry: ClientRegistry = request.app.state.registry
    return await registry.get("macro", env)
|
||||||
|
|
||||||
|
|
||||||
|
def make_router() -> APIRouter:
    """Build the /mcp-macro APIRouter exposing the 11 read-only macro tools.

    Every route is a POST /tools/<name> endpoint that validates the JSON body
    against the matching pydantic schema and delegates to the pure tool
    function in `tools`; the MacroClient is resolved per request via
    `get_macro_client`. No write tools, no leverage_cap.
    """
    r = APIRouter(prefix="/mcp-macro", tags=["macro"])

    @r.post("/tools/get_economic_indicators")
    async def _get_economic_indicators(
        params: t.GetEconomicIndicatorsReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_economic_indicators(client, params)

    @r.post("/tools/get_macro_calendar")
    async def _get_macro_calendar(
        params: t.GetMacroCalendarReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_macro_calendar(client, params)

    @r.post("/tools/get_market_overview")
    async def _get_market_overview(
        params: t.GetMarketOverviewReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_market_overview(client, params)

    @r.post("/tools/get_asset_price")
    async def _get_asset_price(
        params: t.GetAssetPriceReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_asset_price(client, params)

    @r.post("/tools/get_treasury_yields")
    async def _get_treasury_yields(
        params: t.GetTreasuryYieldsReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_treasury_yields(client, params)

    @r.post("/tools/get_equity_futures")
    async def _get_equity_futures(
        params: t.GetEquityFuturesReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_equity_futures(client, params)

    @r.post("/tools/get_yield_curve_slope")
    async def _get_yield_curve_slope(
        params: t.GetYieldCurveSlopeReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_yield_curve_slope(client, params)

    @r.post("/tools/get_breakeven_inflation")
    async def _get_breakeven_inflation(
        params: t.GetBreakevenInflationReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_breakeven_inflation(client, params)

    @r.post("/tools/get_cot_tff")
    async def _get_cot_tff(
        params: t.GetCotTffReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_cot_tff(client, params)

    @r.post("/tools/get_cot_disaggregated")
    async def _get_cot_disaggregated(
        params: t.GetCotDisaggregatedReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_cot_disaggregated(client, params)

    @r.post("/tools/get_cot_extreme_positioning")
    async def _get_cot_extreme_positioning(
        params: t.GetCotExtremeReq,
        client: MacroClient = Depends(get_macro_client),
    ):
        return await t.get_cot_extreme_positioning(client, params)

    return r
|
||||||
@@ -0,0 +1,117 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.macro.cot import (
|
||||||
|
classify_extreme,
|
||||||
|
compute_percentile,
|
||||||
|
parse_disagg_row,
|
||||||
|
parse_tff_row,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_compute_percentile_basic():
    """Percentile of values inside an evenly spaced history."""
    history = [10, 20, 30, 40, 50, 60, 70, 80, 90, 100]
    assert compute_percentile(50, history) == 50.0
    assert compute_percentile(10, history) == 10.0
    assert compute_percentile(100, history) == 100.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_compute_percentile_value_below_min():
    """A value below the whole history clamps to percentile 0."""
    history = [10, 20, 30]
    assert compute_percentile(5, history) == 0.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_compute_percentile_value_above_max():
    """A value above the whole history clamps to percentile 100."""
    history = [10, 20, 30]
    assert compute_percentile(40, history) == 100.0
|
||||||
|
|
||||||
|
|
||||||
|
def test_compute_percentile_empty_history():
    """No history means no percentile: function returns None."""
    assert compute_percentile(50, []) is None
|
||||||
|
|
||||||
|
|
||||||
|
def test_classify_extreme_below_threshold():
    """Percentile at or below 5 classifies as extreme_short."""
    assert classify_extreme(3.0) == "extreme_short"
    assert classify_extreme(5.0) == "extreme_short"  # boundary inclusive
|
||||||
|
|
||||||
|
|
||||||
|
def test_classify_extreme_above_threshold():
    """Percentile at or above 95 classifies as extreme_long."""
    assert classify_extreme(96.0) == "extreme_long"
    assert classify_extreme(95.0) == "extreme_long"  # boundary inclusive
|
||||||
|
|
||||||
|
|
||||||
|
def test_classify_extreme_neutral():
    """Anything strictly between the 5/95 thresholds is neutral."""
    assert classify_extreme(50.0) == "neutral"
    assert classify_extreme(94.99) == "neutral"
    assert classify_extreme(5.01) == "neutral"
|
||||||
|
|
||||||
|
|
||||||
|
def test_classify_extreme_none_input():
    """Missing percentile (None) degrades to neutral rather than raising."""
    assert classify_extreme(None) == "neutral"
|
||||||
|
|
||||||
|
|
||||||
|
# Realistic Socrata payload shape (subset of the relevant fields; values are arbitrary test data)
TFF_SOCRATA_ROW = {
    "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
    "dealer_positions_long_all": "12345",
    "dealer_positions_short_all": "23456",
    "asset_mgr_positions_long": "654321",
    "asset_mgr_positions_short": "200000",
    "lev_money_positions_long": "100000",
    "lev_money_positions_short": "350000",
    "other_rept_positions_long": "50000",
    "other_rept_positions_short": "50000",
    "open_interest_all": "2500000",
}
|
||||||
|
|
||||||
|
# Disaggregated-report counterpart of TFF_SOCRATA_ROW (arbitrary test values).
DISAGG_SOCRATA_ROW = {
    "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
    "prod_merc_positions_long_all": "100000",
    "prod_merc_positions_short_all": "300000",
    "swap_positions_long_all": "50000",
    "swap_positions_short_all": "60000",
    "m_money_positions_long_all": "200000",
    "m_money_positions_short_all": "80000",
    "other_rept_positions_long_all": "10000",
    "other_rept_positions_short_all": "10000",
    "open_interest_all": "1500000",
}
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_tff_row_extracts_all_fields():
    """parse_tff_row converts string fields to ints and derives the *_net values."""
    row = parse_tff_row(TFF_SOCRATA_ROW)
    assert row["report_date"] == "2026-04-22"  # ISO timestamp truncated to date
    assert row["dealer_long"] == 12345
    assert row["dealer_short"] == 23456
    assert row["dealer_net"] == 12345 - 23456
    assert row["asset_mgr_long"] == 654321
    assert row["asset_mgr_net"] == 654321 - 200000
    assert row["lev_funds_long"] == 100000
    assert row["lev_funds_short"] == 350000
    assert row["lev_funds_net"] == 100000 - 350000
    assert row["other_long"] == 50000
    assert row["other_net"] == 0
    assert row["open_interest"] == 2500000
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_tff_row_handles_missing_field():
    """Absent position fields default to 0 instead of raising."""
    payload = {"report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000"}
    row = parse_tff_row(payload)
    assert row["report_date"] == "2026-04-22"
    assert row["dealer_long"] == 0
    assert row["dealer_net"] == 0
|
||||||
|
|
||||||
|
|
||||||
|
def test_parse_disagg_row_extracts_all_fields():
    """parse_disagg_row converts string fields to ints and derives the *_net values."""
    row = parse_disagg_row(DISAGG_SOCRATA_ROW)
    assert row["report_date"] == "2026-04-22"
    assert row["producer_long"] == 100000
    assert row["producer_short"] == 300000
    assert row["producer_net"] == -200000
    assert row["swap_long"] == 50000
    assert row["swap_net"] == -10000
    assert row["managed_money_long"] == 200000
    assert row["managed_money_short"] == 80000
    assert row["managed_money_net"] == 120000
    assert row["other_long"] == 10000
    assert row["other_net"] == 0
    assert row["open_interest"] == 1500000
|
||||||
@@ -0,0 +1,402 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from datetime import UTC
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import pytest
|
||||||
|
import pytest_httpx
|
||||||
|
from cerbero_mcp.exchanges.macro.fetchers import (
|
||||||
|
fetch_breakeven_inflation,
|
||||||
|
fetch_economic_indicators,
|
||||||
|
fetch_macro_calendar,
|
||||||
|
fetch_market_overview,
|
||||||
|
yield_curve_metrics,
|
||||||
|
)
|
||||||
|
|
||||||
|
# --- fetch_economic_indicators ---
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_economic_indicators_no_key():
    """An empty FRED key short-circuits to an error payload without any HTTP."""
    result = await fetch_economic_indicators(fred_api_key="")
    assert "error" in result
    assert result["error"] == "No FRED API key configured"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_economic_indicators_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """All four default FRED series are fetched and parsed to floats."""
    for series_id in ("FEDFUNDS", "CPIAUCSL", "UNRATE", "DGS10"):
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "testkey",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": "5.25"}]},
        )
    result = await fetch_economic_indicators(fred_api_key="testkey")
    assert result["fed_rate"] == 5.25
    assert result["cpi"] == 5.25
    assert result["unemployment"] == 5.25
    assert result["us10y_yield"] == 5.25
    assert "updated_at" in result
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_economic_indicators_filter(httpx_mock: pytest_httpx.HTTPXMock):
    """Requesting a subset of indicators only fetches and returns those."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.stlouisfed.org/fred/series/observations",
            params={
                "series_id": "FEDFUNDS",
                "api_key": "k",
                "file_type": "json",
                "sort_order": "desc",
                "limit": "1",
            },
        ),
        json={"observations": [{"value": "5.33"}]},
    )
    result = await fetch_economic_indicators(fred_api_key="k", indicators=["fed_rate"])
    assert "fed_rate" in result
    assert "cpi" not in result
|
|
||||||
|
|
||||||
|
# --- fetch_macro_calendar ---
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_macro_calendar_forex_factory_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary ForexFactory feed succeeds: events are parsed and returned."""
    from datetime import datetime, timedelta

    # The event must be in the future or the fetcher would filter it out.
    future = (datetime.now(UTC) + timedelta(days=1)).isoformat()
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        json=[
            {
                "date": future,
                "title": "CPI",
                "country": "US",
                "impact": "High",
                "forecast": "3.0%",
                "previous": "3.2%",
            }
        ],
    )
    result = await fetch_macro_calendar()
    assert "events" in result
    assert len(result["events"]) >= 1
    assert result["events"][0]["name"] == "CPI"
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_macro_calendar_no_source(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary feed down and no Finnhub key: explicit empty-events note."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )
    result = await fetch_macro_calendar(finnhub_api_key="")
    assert result == {"events": [], "note": "No calendar source available"}
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
@pytest.mark.httpx_mock(assert_all_responses_were_requested=False, assert_all_requests_were_expected=False)
async def test_macro_calendar_finnhub_fallback(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary feed down but Finnhub key present: falls back to Finnhub events."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )

    def dispatch(request: httpx.Request) -> httpx.Response:
        # Serve only finnhub.io; any other host fails like the primary feed.
        if "finnhub.io" in str(request.url):
            return httpx.Response(
                200,
                json=[{"date": "2024-01-15", "event": "FOMC", "importance": "high", "forecast": "", "prev": ""}],
            )
        return httpx.Response(500)

    httpx_mock.add_callback(dispatch)
    result = await fetch_macro_calendar(finnhub_api_key="fkey")
    assert "events" in result
    assert result["events"][0]["name"] == "FOMC"
|
||||||
|
|
||||||
|
|
||||||
|
# --- fetch_market_overview ---
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_market_overview_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """End-to-end overview: CoinGecko global + simple price, Deribit DVOL, Yahoo quotes.

    Fix: the original body imported `re as _re` twice inside the function;
    the import is now hoisted once at the top.
    """
    import re as _re

    httpx_mock.add_response(
        url="https://api.coingecko.com/api/v3/global",
        json={
            "data": {
                "market_cap_percentage": {"btc": 52.3},
                "total_market_cap": {"usd": 2_000_000_000_000},
            }
        },
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.coingecko.com/api/v3/simple/price",
            params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
        ),
        json={"bitcoin": {"usd": 65000}, "ethereum": {"usd": 3500}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=BTC.*"
        ),
        json={"result": {"data": [[1, 50, 52, 49, 51.5]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=ETH.*"
        ),
        json={"result": {"data": [[1, 60, 62, 59, 61.2]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^GSPC.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 5830.12}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/GC[%=].*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 2412.5}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^VIX.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 18.3}}]}},
    )
    # Clear module cache to force fresh fetch
    from cerbero_mcp.exchanges.macro import fetchers as _f

    _f._MARKET_CACHE["data"] = None
    _f._MARKET_CACHE["ts"] = 0.0
    result = await fetch_market_overview()
    assert result["btc_dominance"] == 52.3
    assert result["btc_price"] == 65000
    assert result["eth_price"] == 3500
    assert result["total_market_cap"] == 2_000_000_000_000
    assert result["dvol_btc"] == 51.5
    assert result["dvol_eth"] == 61.2
    assert result["sp500"] == 5830.12
    assert result["gold"] == 2412.5
    assert result["vix"] == 18.3
    assert "data_timestamp" in result
|
||||||
|
|
||||||
|
|
||||||
|
# --- yield_curve_metrics ---
|
||||||
|
|
||||||
|
def test_yield_curve_metrics_normal_curve():
    """Upward-sloping curve: positive slopes, 'steep' regime, butterfly derived."""
    out = yield_curve_metrics({"us2y": 4.0, "us5y": 4.2, "us10y": 4.5, "us30y": 4.8})
    assert out["slope_2y10y"] == 0.5
    assert out["slope_5y30y"] == 0.6
    assert out["regime"] == "steep"
    # butterfly: 2*4.5 - 4.0 - 4.8 = 0.2
    assert out["butterfly_2_10_30"] == 0.2
|
||||||
|
|
||||||
|
|
||||||
|
def test_yield_curve_metrics_inverted():
    """Downward-sloping curve: negative 2y10y slope classifies as 'inverted'."""
    out = yield_curve_metrics({"us2y": 5.5, "us5y": 5.0, "us10y": 4.5, "us30y": 4.3})
    assert out["slope_2y10y"] == -1.0
    assert out["regime"] == "inverted"
|
||||||
|
|
||||||
|
|
||||||
|
def test_yield_curve_metrics_partial_data():
    """Missing tenors produce None slopes and an 'unknown' regime, not an error."""
    out = yield_curve_metrics({"us10y": 4.5})
    assert out["slope_2y10y"] is None
    assert out["regime"] == "unknown"
|
||||||
|
|
||||||
|
|
||||||
|
# --- fetch_breakeven_inflation ---
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_breakeven_no_key():
    """An empty FRED key short-circuits to an error payload without any HTTP."""
    out = await fetch_breakeven_inflation(fred_api_key="")
    assert "error" in out
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_breakeven_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """Breakevens near 2%: all three series parsed, interpretation 'anchored'."""
    for series_id, val in [("T5YIE", "2.3"), ("T10YIE", "2.5"), ("T5YIFR", "2.7")]:
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "k",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": val}]},
        )
    out = await fetch_breakeven_inflation(fred_api_key="k")
    assert out["breakevens"]["be_5y"] == 2.3
    assert out["breakevens"]["be_10y"] == 2.5
    assert out["breakevens"]["be_5y5y_forward"] == 2.7
    assert out["interpretation"] == "anchored"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_breakeven_high_inflation(httpx_mock: pytest_httpx.HTTPXMock):
    """Breakevens at 3.5%: interpretation flips to 'high_inflation_expected'."""
    for series_id in ("T5YIE", "T10YIE", "T5YIFR"):
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "k",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": "3.5"}]},
        )
    out = await fetch_breakeven_inflation(fred_api_key="k")
    assert out["interpretation"] == "high_inflation_expected"
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_fetch_cot_tff_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """CFTC TFF rows come back oldest-first with derived net positions."""
    from cerbero_mcp.exchanges.macro.fetchers import fetch_cot_tff

    latest_week = {
        "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
        "dealer_positions_long_all": "12345",
        "dealer_positions_short_all": "23456",
        "asset_mgr_positions_long": "654321",
        "asset_mgr_positions_short": "200000",
        "lev_money_positions_long": "100000",
        "lev_money_positions_short": "350000",
        "other_rept_positions_long": "50000",
        "other_rept_positions_short": "50000",
        "open_interest_all": "2500000",
    }
    prior_week = {
        "report_date_as_yyyy_mm_dd": "2026-04-15T00:00:00.000",
        "dealer_positions_long_all": "11000",
        "dealer_positions_short_all": "22000",
        "asset_mgr_positions_long": "640000",
        "asset_mgr_positions_short": "210000",
        "lev_money_positions_long": "110000",
        "lev_money_positions_short": "320000",
        "other_rept_positions_long": "48000",
        "other_rept_positions_short": "52000",
        "open_interest_all": "2480000",
    }
    httpx_mock.add_response(
        url=httpx.URL(
            "https://publicreporting.cftc.gov/resource/gpe5-46if.json",
            params={
                "cftc_contract_market_code": "13874A",
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": "52",
            },
        ),
        # API serves newest-first, matching the $order above.
        json=[latest_week, prior_week],
    )

    out = await fetch_cot_tff("ES", lookback_weeks=52)

    assert out["symbol"] == "ES"
    assert out["report_type"] == "tff"
    assert len(out["rows"]) == 2
    # The fetcher re-sorts ASC by date (oldest first).
    assert out["rows"][0]["report_date"] == "2026-04-15"
    assert out["rows"][1]["report_date"] == "2026-04-22"
    # lev_funds_net = long - short = 100000 - 350000
    assert out["rows"][1]["lev_funds_net"] == -250000
    assert "data_timestamp" in out
||||||
|
@pytest.mark.asyncio
async def test_fetch_cot_tff_unknown_symbol():
    """An unmapped symbol yields an error plus the list of supported symbols."""
    from cerbero_mcp.exchanges.macro.fetchers import fetch_cot_tff

    out = await fetch_cot_tff("INVALID", lookback_weeks=52)

    assert out.get("error") == "unknown_symbol"
    assert "ES" in out.get("available", [])
||||||
|
@pytest.mark.asyncio
async def test_fetch_cot_disagg_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """Disaggregated COT rows expose managed-money and producer net positions."""
    from cerbero_mcp.exchanges.macro.fetchers import fetch_cot_disaggregated

    report_row = {
        "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
        "prod_merc_positions_long_all": "100000",
        "prod_merc_positions_short_all": "300000",
        "swap_positions_long_all": "50000",
        "swap_positions_short_all": "60000",
        "m_money_positions_long_all": "200000",
        "m_money_positions_short_all": "80000",
        "other_rept_positions_long_all": "10000",
        "other_rept_positions_short_all": "10000",
        "open_interest_all": "1500000",
    }
    httpx_mock.add_response(
        url=httpx.URL(
            "https://publicreporting.cftc.gov/resource/72hh-3qpy.json",
            params={
                "cftc_contract_market_code": "067651",
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": "52",
            },
        ),
        json=[report_row],
    )

    out = await fetch_cot_disaggregated("CL", lookback_weeks=52)

    assert out["symbol"] == "CL"
    assert out["report_type"] == "disaggregated"
    assert len(out["rows"]) == 1
    row = out["rows"][0]
    # managed_money_net = 200000 - 80000; producer_net = 100000 - 300000
    assert row["managed_money_net"] == 120000
    assert row["producer_net"] == -200000
||||||
|
@pytest.mark.asyncio
async def test_fetch_cot_disagg_unknown_symbol():
    """An unmapped symbol yields an error plus the list of supported symbols."""
    from cerbero_mcp.exchanges.macro.fetchers import fetch_cot_disaggregated

    out = await fetch_cot_disaggregated("XYZ", lookback_weeks=52)

    assert out.get("error") == "unknown_symbol"
    assert "CL" in out.get("available", [])
||||||
|
@pytest.mark.asyncio
async def test_fetch_cot_extreme_positioning_flags_outliers(monkeypatch):
    """Stub fetch_cot_tff / fetch_cot_disaggregated so the latest data point
    sits at the extreme of its own history, and check both get flagged."""
    from cerbero_mcp.exchanges.macro import fetchers as f

    # ES series where the final lev_funds_net is the low -> extreme_short.
    es_history = [
        {"report_date": f"2026-{month:02d}-01", "lev_funds_net": net}
        for month, net in [(1, 0), (2, 50), (3, 100), (4, -500)]
    ]
    # CL series where the final managed_money_net is the high -> extreme_long.
    cl_history = [
        {"report_date": f"2026-{month:02d}-01", "managed_money_net": net}
        for month, net in [(1, 100), (2, 200), (3, 300), (4, 1000)]
    ]

    async def stub_tff(symbol, lookback_weeks):
        rows = es_history if symbol == "ES" else []
        return {"symbol": symbol, "report_type": "tff", "rows": rows}

    async def stub_disagg(symbol, lookback_weeks):
        rows = cl_history if symbol == "CL" else []
        return {"symbol": symbol, "report_type": "disaggregated", "rows": rows}

    monkeypatch.setattr(f, "fetch_cot_tff", stub_tff)
    monkeypatch.setattr(f, "fetch_cot_disaggregated", stub_disagg)

    out = await f.fetch_cot_extreme_positioning(lookback_weeks=4)

    assert "extremes" in out
    by_symbol = {entry["symbol"]: entry for entry in out["extremes"]}
    assert by_symbol["ES"]["signal"] == "extreme_short"
    assert by_symbol["ES"]["key_role"] == "lev_funds"
    assert by_symbol["CL"]["signal"] == "extreme_long"
    assert by_symbol["CL"]["key_role"] == "managed_money"
||||||
@@ -102,6 +102,29 @@ async def test_build_client_alpaca_returns_correct_env(monkeypatch):
|
|||||||
assert c_live.paper is False
|
assert c_live.paper is False
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_build_client_macro_no_env_distinction(monkeypatch):
    """The macro exchange is read-only: testnet and mainnet produce equivalent
    MacroClient instances carrying the same credentials (env is ignored)."""
    from tests.unit.test_settings import _minimal_env

    for key, value in _minimal_env().items():
        monkeypatch.setenv(key, value)

    from cerbero_mcp.exchanges import build_client
    from cerbero_mcp.exchanges.macro.client import MacroClient
    from cerbero_mcp.settings import Settings

    settings = Settings()
    testnet_client = await build_client(settings, "macro", "testnet")
    mainnet_client = await build_client(settings, "macro", "mainnet")

    # Both are valid MacroClient instances (env ignored).
    assert isinstance(testnet_client, MacroClient)
    assert isinstance(mainnet_client, MacroClient)
    # Same credentials regardless of environment.
    assert testnet_client.fred_api_key == mainnet_client.fred_api_key
    assert testnet_client.finnhub_api_key == mainnet_client.finnhub_api_key
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_build_client_unknown_exchange_raises(monkeypatch):
|
async def test_build_client_unknown_exchange_raises(monkeypatch):
|
||||||
from tests.unit.test_settings import _minimal_env
|
from tests.unit.test_settings import _minimal_env
|
||||||
|
|||||||
Reference in New Issue
Block a user