0ba5a05219
Add a unified historical endpoint that fans out to every exchange supporting the requested (asset_class, symbol) pair, then merges the results into a single consensus candle series with per-bar divergence metrics: - candles[i].close = median across sources - candles[i].sources = count of contributing exchanges - candles[i].div_pct = (max-min)/median for that bar's close Crypto routes BTC/ETH/SOL across Bybit + Hyperliquid + Deribit; equities route to Alpaca for now (IBKR omitted from MVP because its bars endpoint takes a relative period instead of start/end). Partial failures return a warning envelope (failed_sources) instead of failing the whole request; all sources failing → HTTP 502. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
135 lines
4.7 KiB
Python
135 lines
4.7 KiB
Python
from __future__ import annotations
|
|
|
|
from typing import Any
|
|
|
|
import pytest
|
|
from cerbero_mcp.exchanges.cross.client import CrossClient
|
|
from fastapi import HTTPException
|
|
|
|
|
|
class _Fake:
|
|
def __init__(self, candles: list[dict[str, Any]] | None = None,
|
|
*, raises: Exception | None = None):
|
|
self._candles = candles or []
|
|
self._raises = raises
|
|
self.calls: list[dict[str, Any]] = []
|
|
|
|
async def get_historical(self, **kwargs: Any) -> dict[str, Any]:
|
|
if self._raises:
|
|
raise self._raises
|
|
self.calls.append(kwargs)
|
|
return {"candles": list(self._candles)}
|
|
|
|
async def get_bars(self, **kwargs: Any) -> dict[str, Any]:
|
|
if self._raises:
|
|
raise self._raises
|
|
self.calls.append(kwargs)
|
|
return {"candles": list(self._candles)}
|
|
|
|
|
|
class _FakeRegistry:
|
|
def __init__(self, clients: dict[str, _Fake]):
|
|
self._clients = clients
|
|
|
|
async def get(self, exchange: str, env: str) -> _Fake:
|
|
if exchange not in self._clients:
|
|
raise KeyError(exchange)
|
|
return self._clients[exchange]
|
|
|
|
|
|
def _c(ts: int, close: float = 100.0) -> dict[str, Any]:
|
|
return {"timestamp": ts, "open": close, "high": close, "low": close,
|
|
"close": close, "volume": 1.0}
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_crypto_three_sources_aggregates():
    """BTC fans out to all three crypto venues and merges into one series."""
    clients = {
        name: _Fake([_c(1, 100), _c(2, 200)])
        for name in ("bybit", "hyperliquid", "deribit")
    }
    cross = CrossClient(_FakeRegistry(clients), env="mainnet")
    result = await cross.get_historical(
        symbol="BTC",
        asset_class="crypto",
        interval="1h",
        start_date="2026-05-09T00:00:00",
        end_date="2026-05-10T00:00:00",
    )
    # Consensus envelope: both bars survive, each backed by all 3 venues.
    assert result["symbol"] == "BTC"
    assert result["asset_class"] == "crypto"
    assert len(result["candles"]) == 2
    first_bar = result["candles"][0]
    assert first_bar["sources"] == 3
    assert first_bar["div_pct"] == 0.0  # identical closes -> zero divergence
    assert set(result["sources_used"]) == {"bybit", "hyperliquid", "deribit"}
    assert result["failed_sources"] == []
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_crypto_partial_failure_returns_partial_with_warning():
    """One venue erroring yields a partial result plus a failed_sources entry."""
    clients = {
        "bybit": _Fake([_c(1, 100)]),
        "hyperliquid": _Fake([_c(1, 100)]),
        "deribit": _Fake(raises=RuntimeError("upstream down")),
    }
    cross = CrossClient(_FakeRegistry(clients), env="mainnet")
    result = await cross.get_historical(
        symbol="BTC",
        asset_class="crypto",
        interval="1h",
        start_date="2026-05-09T00:00:00",
        end_date="2026-05-10T00:00:00",
    )
    # Only the two healthy venues contribute to the consensus bar.
    assert result["candles"][0]["sources"] == 2
    assert set(result["sources_used"]) == {"bybit", "hyperliquid"}
    failures = result["failed_sources"]
    assert len(failures) == 1
    assert failures[0]["exchange"] == "deribit"
    assert "upstream down" in failures[0]["error"]
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_all_sources_fail_raises_502():
    """Every venue failing escalates to HTTP 502 instead of an empty body."""
    clients = {
        name: _Fake(raises=RuntimeError(msg))
        for name, msg in (("bybit", "a"), ("hyperliquid", "b"), ("deribit", "c"))
    }
    cross = CrossClient(_FakeRegistry(clients), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cross.get_historical(
            symbol="BTC",
            asset_class="crypto",
            interval="1h",
            start_date="2026-05-09T00:00:00",
            end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 502
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_unsupported_symbol_raises_400():
    """A symbol with no supporting exchanges is rejected up front with 400."""
    cross = CrossClient(_FakeRegistry({}), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cross.get_historical(
            symbol="NONEXISTENT",
            asset_class="crypto",
            interval="1h",
            start_date="2026-05-09T00:00:00",
            end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 400
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_stocks_routes_to_alpaca_only():
    """Equities route to Alpaca alone, passing the symbol through untouched."""
    alpaca = _Fake([_c(1, 175.0)])
    cross = CrossClient(_FakeRegistry({"alpaca": alpaca}), env="mainnet")
    result = await cross.get_historical(
        symbol="AAPL",
        asset_class="stocks",
        interval="1d",
        start_date="2026-04-09T00:00:00",
        end_date="2026-04-10T00:00:00",
    )
    assert result["sources_used"] == ["alpaca"]
    assert result["candles"][0]["close"] == 175.0
    # Alpaca was called with native symbol
    assert alpaca.calls[0]["symbol"] == "AAPL"
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_unsupported_interval_raises_400():
    """An interval no route recognizes fails fast with HTTP 400."""
    cross = CrossClient(_FakeRegistry({}), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cross.get_historical(
            symbol="BTC",
            asset_class="crypto",
            interval="3h",
            start_date="2026-05-09T00:00:00",
            end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 400
|