feat: 15 nuovi indicatori quant (common + deribit + bybit + macro + sentiment)
Common (mcp_common): - indicators.py: vol_cone, hurst_exponent, half_life_mean_reversion, garch11_forecast, autocorrelation, rolling_sharpe, var_cvar - options.py (nuovo): oi_weighted_skew, smile_asymmetry, atm_vs_wings_vol, dealer_gamma_profile, vanna_charm_aggregate - microstructure.py (nuovo): orderbook_imbalance (ratio + microprice + slope) - stats.py (nuovo): cointegration_test Engle-Granger + ADF helper Deribit (+6 tool MCP): - get_dealer_gamma_profile (net dealer gamma + flip level) - get_vanna_charm (vanna/charm aggregati pesati OI) - get_oi_weighted_skew, get_smile_asymmetry, get_atm_vs_wings_vol - get_orderbook_imbalance Bybit (+2 tool MCP): - get_orderbook_imbalance, get_basis_term_structure (futures dated curve) Macro (+2 tool MCP): - get_yield_curve_slope (2y10y/5y30y + butterfly + regime) - get_breakeven_inflation (FRED T5YIE/T10YIE/T5YIFR) Sentiment (+3 tool MCP): - get_funding_arb_spread (opportunità arb compatte annualizzate) - get_liquidation_heatmap (heuristic da OI delta + funding extreme, no feed paid Coinglass) - get_cointegration_pairs (Engle-Granger su coppie crypto Binance hourly) Tutto in TDD pure-Python (no numpy/scipy in mcp_common). README aggiornato con elenco completo. 442 test totali verdi. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -438,6 +438,137 @@ async def fetch_cross_exchange_funding(assets: list[str] | None = None) -> dict[
|
||||
}
|
||||
|
||||
|
||||
async def fetch_funding_arb_spread(assets: list[str] | None = None) -> dict[str, Any]:
    """Summarize cross-exchange funding-rate arbitrage opportunities.

    Compact, action-oriented wrapper over fetch_cross_exchange_funding:
    for each asset it reports the venues with the lowest and highest
    funding rate, the spread between them, and that spread annualized
    as a percentage.
    """
    cross = await fetch_cross_exchange_funding(assets)
    per_asset = cross.get("snapshot") or {}
    tracked_venues = ("binance", "bybit", "okx", "hyperliquid")
    opportunities: list[dict[str, Any]] = []

    for symbol, venue_data in per_asset.items():
        available = {
            venue: rate
            for venue, rate in venue_data.items()
            if venue in tracked_venues and rate is not None
        }
        # Need at least two venues quoting a rate to have a spread at all.
        if len(available) < 2:
            continue

        ranked = sorted(available.items(), key=lambda item: item[1])
        cheapest_venue, cheapest_rate = ranked[0]
        richest_venue, richest_rate = ranked[-1]
        gap = richest_rate - cheapest_rate

        # Funding cycle: 8h on most, 1h on hyperliquid → assume 8h => 3x/day
        annualized = gap * 3 * 365 * 100

        opportunities.append({
            "asset": symbol,
            "long_venue": cheapest_venue,
            "short_venue": richest_venue,
            "long_funding": cheapest_rate,
            "short_funding": richest_rate,
            "spread": gap,
            "annualized_pct": round(annualized, 2),
            "actionable": annualized > 50,
        })

    # Best (largest annualized spread) opportunities first.
    opportunities.sort(key=lambda row: row["annualized_pct"], reverse=True)
    return {
        "opportunities": opportunities,
        "data_timestamp": cross.get("data_timestamp"),
    }
|
||||
|
||||
|
||||
async def fetch_liquidation_heatmap(asset: str = "BTC") -> dict[str, Any]:
    """Heuristic liquidation-pressure estimate for *asset*.

    Combines open-interest deltas with cross-exchange funding extremes.
    Does NOT rely on any paid liquidation feed (e.g. Coinglass): it only
    estimates where leveraged exposure prone to liquidation concentrates.

    long_squeeze_risk: high when OI grows and funding is positive
    (crowded longs). short_squeeze_risk: high when OI grows and funding
    is negative (crowded shorts).
    """
    asset = asset.upper()
    oi_hist = await fetch_oi_history(asset=asset, period="5m", limit=288)
    cross_funding = await fetch_cross_exchange_funding(assets=[asset])

    asset_snap = (cross_funding.get("snapshot") or {}).get(asset) or {}
    venue_rates = [
        rate
        for venue, rate in asset_snap.items()
        if venue in ("binance", "bybit", "okx", "hyperliquid") and rate is not None
    ]
    mean_funding = sum(venue_rates) / len(venue_rates) if venue_rates else None

    oi_change_4h = oi_hist.get("delta_pct_4h")
    oi_change_24h = oi_hist.get("delta_pct_24h")

    long_squeeze = "low"
    short_squeeze = "low"
    if mean_funding is not None and oi_change_24h is not None:
        # Crowded longs: rising OI together with positive funding.
        if mean_funding > 0.0001 and oi_change_24h > 5:
            long_squeeze = "high"
        elif mean_funding > 0.00005 and oi_change_24h > 2:
            long_squeeze = "medium"
        # Crowded shorts: rising OI together with negative funding.
        if mean_funding < -0.0001 and oi_change_24h > 5:
            short_squeeze = "high"
        elif mean_funding < -0.00005 and oi_change_24h > 2:
            short_squeeze = "medium"

    return {
        "asset": asset,
        "avg_funding_rate": mean_funding,
        "oi_delta_pct_4h": oi_change_4h,
        "oi_delta_pct_24h": oi_change_24h,
        "long_squeeze_risk": long_squeeze,
        "short_squeeze_risk": short_squeeze,
        "note": "heuristic — non sostituisce feed liq dedicati (Coinglass).",
    }
|
||||
|
||||
|
||||
async def fetch_cointegration_pairs(
    pairs: list[list[str]] | None = None,
    lookback_hours: int = 24,
) -> dict[str, Any]:
    """Engle-Granger cointegration test on crypto pairs (Binance hourly closes).

    Args:
        pairs: list of [base, quote] pairs (e.g. [["BTC", "ETH"]]).
            Defaults to the three combinations of BTC/ETH/SOL.
        lookback_hours: number of hourly candles to request. A floor of 50
            samples is enforced so the regression has enough data, and the
            value is capped at 1000 — Binance's per-request klines maximum.

    Returns:
        dict with "results" (one entry per pair, sorted most-cointegrated
        first, i.e. lowest ADF t-stat; failed pairs carry an "error" key)
        and "lookback_hours" (echoed as requested, even if capped).
    """
    from mcp_common.stats import cointegration_test

    pairs = pairs or [["BTC", "ETH"], ["BTC", "SOL"], ["ETH", "SOL"]]
    out: list[dict[str, Any]] = []
    interval = "1h"
    # Binance /api/v3/klines rejects limit > 1000, so cap the request size;
    # previously a large lookback_hours would make every request fail.
    limit = min(1000, max(50, lookback_hours))

    async with httpx.AsyncClient(timeout=15) as client:
        for pair in pairs:
            if len(pair) != 2:
                continue  # skip malformed entries silently
            a, b = pair[0].upper(), pair[1].upper()
            sym_a = f"{a}USDT"
            sym_b = f"{b}USDT"
            try:
                resp_a = await client.get(
                    "https://api.binance.com/api/v3/klines",
                    params={"symbol": sym_a, "interval": interval, "limit": limit},
                )
                resp_b = await client.get(
                    "https://api.binance.com/api/v3/klines",
                    params={"symbol": sym_b, "interval": interval, "limit": limit},
                )
                if resp_a.status_code != 200 or resp_b.status_code != 200:
                    continue
                # Kline index 4 is the close price.
                closes_a = [float(k[4]) for k in resp_a.json()]
                closes_b = [float(k[4]) for k in resp_b.json()]
                # Align lengths: a recently listed symbol may have fewer candles.
                if len(closes_a) != len(closes_b):
                    n = min(len(closes_a), len(closes_b))
                    closes_a = closes_a[-n:]
                    closes_b = closes_b[-n:]
                result = cointegration_test(closes_a, closes_b)
                out.append({
                    "pair": [a, b],
                    "samples": len(closes_a),
                    **result,
                })
            except Exception as e:
                # Best-effort per pair: record the failure and keep going.
                out.append({"pair": [a, b], "error": str(e)})

    # Most negative ADF t-stat (strongest cointegration) first; error rows
    # have no t-stat, fall back to 0 and so sort after cointegrated pairs.
    out.sort(key=lambda r: r.get("adf_t_stat") or 0)
    return {
        "results": out,
        "lookback_hours": lookback_hours,
    }
|
||||
|
||||
|
||||
async def fetch_world_news() -> dict[str, Any]:
|
||||
"""Fetch world financial news from free RSS feeds."""
|
||||
articles: list[dict[str, Any]] = []
|
||||
|
||||
Reference in New Issue
Block a user