feat(V2): /mcp-cross/tools/get_historical with cross-exchange consensus
Add a unified historical endpoint that fans out to every exchange supporting the requested (asset_class, symbol) pair, then merges the results into a single consensus candle series with per-bar divergence metrics: - candles[i].close = median across sources - candles[i].sources = count of contributing exchanges - candles[i].div_pct = (max-min)/median for that bar's close Crypto routes BTC/ETH/SOL across Bybit + Hyperliquid + Deribit; equities route to Alpaca for now (IBKR omitted from MVP because its bars endpoint takes a relative period instead of start/end). Partial failures return a warning envelope (failed_sources) instead of failing the whole request; all sources failing → HTTP 502. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -23,6 +23,7 @@ from cerbero_mcp.exchanges import build_client
|
|||||||
from cerbero_mcp.routers import (
|
from cerbero_mcp.routers import (
|
||||||
alpaca,
|
alpaca,
|
||||||
bybit,
|
bybit,
|
||||||
|
cross,
|
||||||
deribit,
|
deribit,
|
||||||
hyperliquid,
|
hyperliquid,
|
||||||
ibkr,
|
ibkr,
|
||||||
@@ -71,6 +72,7 @@ def _make_app(settings: Settings) -> FastAPI:
|
|||||||
app.include_router(ibkr.make_router())
|
app.include_router(ibkr.make_router())
|
||||||
app.include_router(macro.make_router())
|
app.include_router(macro.make_router())
|
||||||
app.include_router(sentiment.make_router())
|
app.include_router(sentiment.make_router())
|
||||||
|
app.include_router(cross.make_router())
|
||||||
app.include_router(admin.make_admin_router())
|
app.include_router(admin.make_admin_router())
|
||||||
|
|
||||||
return app
|
return app
|
||||||
|
|||||||
@@ -0,0 +1,146 @@
|
|||||||
|
"""Cross-exchange historical aggregator.
|
||||||
|
|
||||||
|
Fan-out a canonical (symbol, asset_class, interval, start, end) request to
|
||||||
|
every active exchange that supports the pair, then merge the results into
|
||||||
|
a single consensus candle series with per-bar divergence metrics.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import datetime as _dt
|
||||||
|
from typing import Any, Literal, Protocol
|
||||||
|
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.cross.consensus import merge_candles
|
||||||
|
from cerbero_mcp.exchanges.cross.symbol_map import (
|
||||||
|
get_sources,
|
||||||
|
supported_intervals,
|
||||||
|
to_native_interval,
|
||||||
|
to_native_symbol,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# Deployment environment selector; mirrors the per-request value the
# routers resolve from request.state.
Environment = Literal["testnet", "mainnet"]


class _Registry(Protocol):
    """Structural dependency: resolves (exchange id, environment) to a live
    exchange client. The app's ClientRegistry satisfies this protocol."""

    async def get(self, exchange: str, env: Environment) -> Any: ...
|
||||||
|
|
||||||
|
|
||||||
|
def _iso_to_ms(s: str) -> int:
    """Convert an ISO-8601 timestamp string to epoch milliseconds.

    Accepts a trailing ``Z`` (mapped to ``+00:00``). Naive timestamps are
    interpreted as UTC: calling ``datetime.timestamp()`` on a naive value
    would use the *server's local* timezone, making the converted range
    depend on where the service runs.
    """
    dt = _dt.datetime.fromisoformat(s.replace("Z", "+00:00"))
    if dt.tzinfo is None:
        # Pin naive inputs to UTC so results are host-timezone independent.
        dt = dt.replace(tzinfo=_dt.timezone.utc)
    return int(dt.timestamp() * 1000)
|
||||||
|
|
||||||
|
|
||||||
|
async def _call_bybit(client: Any, sym: str, interval: str,
                      start: str, end: str) -> dict[str, Any]:
    """Adapter for Bybit: linear-perp klines, dates converted to epoch ms."""
    start_ms = _iso_to_ms(start)
    end_ms = _iso_to_ms(end)
    payload: dict[str, Any] = await client.get_historical(
        symbol=sym,
        category="linear",
        interval=interval,
        start=start_ms,
        end=end_ms,
    )
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
async def _call_hyperliquid(client: Any, sym: str, interval: str,
                            start: str, end: str) -> dict[str, Any]:
    """Adapter for Hyperliquid: ISO date strings are passed through as-is."""
    payload: dict[str, Any] = await client.get_historical(
        instrument=sym,
        start_date=start,
        end_date=end,
        resolution=interval,
    )
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
async def _call_deribit(client: Any, sym: str, interval: str,
                        start: str, end: str) -> dict[str, Any]:
    """Adapter for Deribit: ISO date strings are passed through as-is."""
    payload: dict[str, Any] = await client.get_historical(
        instrument=sym,
        start_date=start,
        end_date=end,
        resolution=interval,
    )
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
async def _call_alpaca(client: Any, sym: str, interval: str,
                       start: str, end: str) -> dict[str, Any]:
    """Adapter for Alpaca: stock bars via get_bars, ISO dates passed through."""
    payload: dict[str, Any] = await client.get_bars(
        symbol=sym,
        asset_class="stocks",
        interval=interval,
        start=start,
        end=end,
    )
    return payload
|
||||||
|
|
||||||
|
|
||||||
|
# Exchange id → adapter coroutine normalizing each client's native
# historical-data signature. Keys must match the exchange ids returned by
# symbol_map.get_sources; a miss surfaces as a per-source KeyError failure.
_DISPATCH = {
    "bybit": _call_bybit,
    "hyperliquid": _call_hyperliquid,
    "deribit": _call_deribit,
    "alpaca": _call_alpaca,
}
|
||||||
|
|
||||||
|
|
||||||
|
class CrossClient:
    """Aggregates historical candles across every exchange serving a symbol.

    Fans the request out concurrently, tolerates partial upstream failures
    (reported in ``failed_sources``), and merges the surviving series via
    ``merge_candles``.
    """

    def __init__(self, registry: _Registry, *, env: Environment):
        self._registry = registry
        self._env = env

    async def _fetch_one(
        self, exchange: str, native_sym: str, native_interval: str,
        start: str, end: str,
    ) -> tuple[str, list[dict[str, Any]] | Exception]:
        """Fetch one source's candles; failures are returned, never raised."""
        try:
            client = await self._registry.get(exchange, self._env)
            fetch = _DISPATCH[exchange]
            payload = await fetch(client, native_sym, native_interval, start, end)
            candles = payload.get("candles", [])
        except Exception as err:  # noqa: BLE001
            return exchange, err
        return exchange, candles

    async def get_historical(
        self, *, symbol: str, asset_class: str, interval: str,
        start_date: str, end_date: str,
    ) -> dict[str, Any]:
        """Return the consensus candle series for a canonical symbol.

        Raises HTTP 400 for an unknown symbol/asset_class or interval, and
        HTTP 502 when every source fails.
        """
        sources = get_sources(asset_class, symbol)
        if not sources:
            raise HTTPException(
                status_code=400,
                detail=f"unsupported symbol/asset_class: {symbol} ({asset_class})",
            )
        if interval not in supported_intervals():
            raise HTTPException(
                status_code=400,
                detail=f"unsupported interval: {interval}; "
                f"supported: {supported_intervals()}",
            )

        # Fan out concurrently; _fetch_one never raises, so gather is safe
        # without return_exceptions.
        results = await asyncio.gather(*(
            self._fetch_one(
                src,
                to_native_symbol(asset_class, symbol, src),
                to_native_interval(interval, src),
                start_date, end_date,
            )
            for src in sources
        ))

        succeeded: dict[str, list[dict[str, Any]]] = {}
        failed: list[dict[str, str]] = []
        for src, outcome in results:
            if isinstance(outcome, Exception):
                failed.append({"exchange": src, "error": f"{type(outcome).__name__}: {outcome}"})
            else:
                succeeded[src] = outcome

        if not succeeded:
            raise HTTPException(
                status_code=502,
                detail={"error": "all sources failed", "failed_sources": failed},
            )

        return {
            "symbol": symbol.upper(),
            "asset_class": asset_class,
            "interval": interval,
            "candles": merge_candles(succeeded),
            "sources_used": sorted(succeeded),
            "failed_sources": failed,
        }
|
||||||
@@ -0,0 +1,37 @@
|
|||||||
|
"""Pure consensus aggregation: merge per-source OHLCV candles by timestamp.
|
||||||
|
|
||||||
|
The output is a single time-series with the median OHLC across sources,
|
||||||
|
mean volume, the contributing source count, and a divergence % computed
|
||||||
|
on the close range. div_pct gives a quick quality signal: 0 means full
|
||||||
|
agreement, > X% means at least one source is suspect.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from collections import defaultdict
|
||||||
|
from statistics import median
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
def merge_candles(by_source: dict[str, list[dict[str, Any]]]) -> list[dict[str, Any]]:
    """Merge per-source OHLCV candles into one consensus series.

    Candles are bucketed by integer timestamp across all sources. Each
    output bar carries the median OHLC, the mean volume, the number of
    contributing sources, and ``div_pct`` — the close range relative to
    the median close (0.0 means full agreement).
    """
    buckets: dict[int, list[dict[str, Any]]] = defaultdict(list)
    for series in by_source.values():
        for bar in series:
            buckets[int(bar["timestamp"])].append(bar)

    merged: list[dict[str, Any]] = []
    for ts in sorted(buckets):
        bars = buckets[ts]
        closes = [float(b["close"]) for b in bars]
        consensus_close = float(median(closes))
        # Spread of the close across sources, relative to the consensus;
        # guard against a zero median to avoid division by zero.
        spread = (max(closes) - min(closes)) / consensus_close if consensus_close else 0.0
        merged.append(
            {
                "timestamp": ts,
                "open": float(median(float(b["open"]) for b in bars)),
                "high": float(median(float(b["high"]) for b in bars)),
                "low": float(median(float(b["low"]) for b in bars)),
                "close": consensus_close,
                "volume": sum(float(b["volume"]) for b in bars) / len(bars),
                "sources": len(bars),
                "div_pct": spread,
            }
        )
    return merged
|
||||||
@@ -0,0 +1,60 @@
|
|||||||
|
"""Routing table: canonical (asset_class, symbol, interval) → per-exchange native.
|
||||||
|
|
||||||
|
Crypto canonical symbols default to USD/USDT-quoted perpetuals on the most
|
||||||
|
liquid pair available. Equities currently route to Alpaca only — IBKR is
|
||||||
|
omitted from the cross MVP because its bars endpoint takes a relative
|
||||||
|
period instead of (start, end).
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
AssetClass = str

# Canonical crypto symbol → native symbol per exchange (perp markets).
_CRYPTO_SYMBOLS: dict[str, dict[str, str]] = {
    "BTC": {"bybit": "BTCUSDT", "hyperliquid": "BTC", "deribit": "BTC-PERPETUAL"},
    "ETH": {"bybit": "ETHUSDT", "hyperliquid": "ETH", "deribit": "ETH-PERPETUAL"},
    "SOL": {"bybit": "SOLUSDT", "hyperliquid": "SOL"},
}

# Canonical equity symbol → native symbol per exchange (Alpaca-only MVP).
_STOCK_SYMBOLS: dict[str, dict[str, str]] = {
    "AAPL": {"alpaca": "AAPL"},
    "SPY": {"alpaca": "SPY"},
    "QQQ": {"alpaca": "QQQ"},
    "TSLA": {"alpaca": "TSLA"},
    "NVDA": {"alpaca": "NVDA"},
}

# Top-level routing table keyed by asset class.
_SYMBOLS: dict[AssetClass, dict[str, dict[str, str]]] = {
    "crypto": _CRYPTO_SYMBOLS,
    "stocks": _STOCK_SYMBOLS,
}

# Canonical interval → native interval token per exchange.
_INTERVALS: dict[str, dict[str, str]] = {
    "1m": {"bybit": "1", "hyperliquid": "1m", "deribit": "1m", "alpaca": "1m"},
    "5m": {"bybit": "5", "hyperliquid": "5m", "deribit": "5m", "alpaca": "5m"},
    "15m": {"bybit": "15", "hyperliquid": "15m", "deribit": "15m", "alpaca": "15m"},
    "1h": {"bybit": "60", "hyperliquid": "1h", "deribit": "1h", "alpaca": "1h"},
    "4h": {"bybit": "240", "hyperliquid": "4h", "deribit": "4h", "alpaca": "4h"},
    "1d": {"bybit": "D", "hyperliquid": "1d", "deribit": "1d", "alpaca": "1d"},
}


def get_sources(asset_class: AssetClass, symbol: str) -> list[str]:
    """Return the exchange ids serving (asset_class, symbol); [] if unknown."""
    return list(_SYMBOLS.get(asset_class, {}).get(symbol.upper(), {}))


def to_native_symbol(
    asset_class: AssetClass, symbol: str, exchange: str
) -> str:
    """Translate a canonical symbol to the exchange-native one.

    Raises KeyError when the pair is not routed to *exchange*.
    """
    return _SYMBOLS[asset_class][symbol.upper()][exchange]


def to_native_interval(interval: str, exchange: str) -> str:
    """Translate a canonical interval to the exchange-native token.

    Raises KeyError for an unknown canonical interval or exchange.
    """
    return _INTERVALS[interval][exchange]


def supported_intervals() -> list[str]:
    """All canonical intervals accepted by the cross endpoint."""
    return [*_INTERVALS]
|
||||||
@@ -0,0 +1,28 @@
|
|||||||
|
"""Pydantic schemas + thin tool wrappers for the /mcp-cross router."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Literal
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.cross.client import CrossClient
|
||||||
|
|
||||||
|
# Asset classes the cross endpoint routes; must match symbol_map's tables.
AssetClass = Literal["crypto", "stocks"]


class GetHistoricalReq(BaseModel):
    """Request body for POST /mcp-cross/tools/get_historical."""

    # Canonical symbol, e.g. "BTC" or "AAPL" (upper-cased downstream).
    symbol: str
    asset_class: AssetClass = "crypto"
    # Canonical interval; validated against supported_intervals() downstream.
    interval: str = "1h"
    # ISO-8601 timestamps delimiting the requested range.
    start_date: str
    end_date: str
|
||||||
|
|
||||||
|
|
||||||
|
async def get_historical(client: CrossClient, params: GetHistoricalReq) -> dict:
    """Forward a validated request body to the cross-exchange client."""
    kwargs = {
        "symbol": params.symbol,
        "asset_class": params.asset_class,
        "interval": params.interval,
        "start_date": params.start_date,
        "end_date": params.end_date,
    }
    return await client.get_historical(**kwargs)
|
||||||
@@ -0,0 +1,36 @@
|
|||||||
|
"""Router /mcp-cross/* — historical data with cross-exchange consensus."""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Literal, cast
|
||||||
|
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
|
||||||
|
from cerbero_mcp.client_registry import ClientRegistry
|
||||||
|
from cerbero_mcp.exchanges.cross import tools as t
|
||||||
|
from cerbero_mcp.exchanges.cross.client import CrossClient
|
||||||
|
|
||||||
|
# Deployment environments a request may target.
Environment = Literal["testnet", "mainnet"]


def get_environment(request: Request) -> Environment:
    """FastAPI dependency: read the environment from request.state.

    NOTE(review): presumably set by app middleware before routing — confirm
    against the app setup outside this file.
    """
    return cast(Environment, request.state.environment)
|
||||||
|
|
||||||
|
|
||||||
|
def get_cross_client(
    request: Request, env: Environment = Depends(get_environment),
) -> CrossClient:
    """FastAPI dependency: build a per-request CrossClient bound to the
    app-wide client registry and the request's resolved environment."""
    registry: ClientRegistry = request.app.state.registry
    return CrossClient(registry, env=env)
|
||||||
|
|
||||||
|
|
||||||
|
def make_router() -> APIRouter:
    """Build the /mcp-cross router exposing the consensus tool endpoints."""
    router = APIRouter(prefix="/mcp-cross", tags=["cross"])

    @router.post("/tools/get_historical")
    async def _get_historical_endpoint(
        params: t.GetHistoricalReq,
        client: CrossClient = Depends(get_cross_client),
    ):
        # Thin delegation: validation lives in the schema, logic in the client.
        return await t.get_historical(client, params)

    return router
|
||||||
@@ -0,0 +1,134 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from cerbero_mcp.exchanges.cross.client import CrossClient
|
||||||
|
from fastapi import HTTPException
|
||||||
|
|
||||||
|
|
||||||
|
class _Fake:
    """Stub exchange client: replays canned candles or raises on demand.

    Records the kwargs of every successful call in ``calls`` so tests can
    assert on the native parameters the aggregator produced.
    """

    def __init__(self, candles: list[dict[str, Any]] | None = None,
                 *, raises: Exception | None = None):
        self._candles = candles or []
        self._raises = raises
        self.calls: list[dict[str, Any]] = []

    async def _respond(self, kwargs: dict[str, Any]) -> dict[str, Any]:
        # Shared behavior for both entry points: fail fast (before recording
        # the call) or replay a fresh copy of the canned candles.
        if self._raises:
            raise self._raises
        self.calls.append(kwargs)
        return {"candles": list(self._candles)}

    async def get_historical(self, **kwargs: Any) -> dict[str, Any]:
        return await self._respond(kwargs)

    async def get_bars(self, **kwargs: Any) -> dict[str, Any]:
        return await self._respond(kwargs)
|
||||||
|
|
||||||
|
|
||||||
|
class _FakeRegistry:
    """In-memory registry keyed by exchange id; the env argument is ignored."""

    def __init__(self, clients: dict[str, _Fake]):
        self._clients = clients

    async def get(self, exchange: str, env: str) -> _Fake:
        # Mirror a real registry: unknown exchanges raise KeyError.
        if exchange in self._clients:
            return self._clients[exchange]
        raise KeyError(exchange)
|
||||||
|
|
||||||
|
|
||||||
|
def _c(ts: int, close: float = 100.0) -> dict[str, Any]:
    """Flat candle fixture: OHLC all equal to *close*, unit volume."""
    return {
        "timestamp": ts,
        "open": close,
        "high": close,
        "low": close,
        "close": close,
        "volume": 1.0,
    }
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
async def test_crypto_three_sources_aggregates():
    """Happy path: three healthy crypto sources merge into one series."""
    fakes = {
        "bybit": _Fake([_c(1, 100), _c(2, 200)]),
        "hyperliquid": _Fake([_c(1, 100), _c(2, 200)]),
        "deribit": _Fake([_c(1, 100), _c(2, 200)]),
    }
    cc = CrossClient(_FakeRegistry(fakes), env="mainnet")
    out = await cc.get_historical(
        symbol="BTC", asset_class="crypto", interval="1h",
        start_date="2026-05-09T00:00:00", end_date="2026-05-10T00:00:00",
    )
    assert out["symbol"] == "BTC"
    assert out["asset_class"] == "crypto"
    assert len(out["candles"]) == 2
    # Identical inputs: each bar counts all three sources, zero divergence.
    assert out["candles"][0]["sources"] == 3
    assert out["candles"][0]["div_pct"] == 0.0
    assert set(out["sources_used"]) == {"bybit", "hyperliquid", "deribit"}
    assert out["failed_sources"] == []


@pytest.mark.asyncio
async def test_crypto_partial_failure_returns_partial_with_warning():
    """One failing source yields a partial result plus a warning entry."""
    fakes = {
        "bybit": _Fake([_c(1, 100)]),
        "hyperliquid": _Fake([_c(1, 100)]),
        "deribit": _Fake(raises=RuntimeError("upstream down")),
    }
    cc = CrossClient(_FakeRegistry(fakes), env="mainnet")
    out = await cc.get_historical(
        symbol="BTC", asset_class="crypto", interval="1h",
        start_date="2026-05-09T00:00:00", end_date="2026-05-10T00:00:00",
    )
    assert out["candles"][0]["sources"] == 2
    assert set(out["sources_used"]) == {"bybit", "hyperliquid"}
    assert len(out["failed_sources"]) == 1
    assert out["failed_sources"][0]["exchange"] == "deribit"
    # The original exception text is surfaced in the warning envelope.
    assert "upstream down" in out["failed_sources"][0]["error"]


@pytest.mark.asyncio
async def test_all_sources_fail_raises_502():
    """Every source failing escalates to HTTP 502 instead of a partial."""
    fakes = {
        "bybit": _Fake(raises=RuntimeError("a")),
        "hyperliquid": _Fake(raises=RuntimeError("b")),
        "deribit": _Fake(raises=RuntimeError("c")),
    }
    cc = CrossClient(_FakeRegistry(fakes), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cc.get_historical(
            symbol="BTC", asset_class="crypto", interval="1h",
            start_date="2026-05-09T00:00:00", end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 502


@pytest.mark.asyncio
async def test_unsupported_symbol_raises_400():
    """Unknown symbols are rejected before any exchange is contacted."""
    cc = CrossClient(_FakeRegistry({}), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cc.get_historical(
            symbol="NONEXISTENT", asset_class="crypto", interval="1h",
            start_date="2026-05-09T00:00:00", end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 400


@pytest.mark.asyncio
async def test_stocks_routes_to_alpaca_only():
    """Equities fan out to Alpaca alone per the current routing table."""
    fake = _Fake([_c(1, 175.0)])
    cc = CrossClient(_FakeRegistry({"alpaca": fake}), env="mainnet")
    out = await cc.get_historical(
        symbol="AAPL", asset_class="stocks", interval="1d",
        start_date="2026-04-09T00:00:00", end_date="2026-04-10T00:00:00",
    )
    assert out["sources_used"] == ["alpaca"]
    assert out["candles"][0]["close"] == 175.0
    # Alpaca was called with native symbol
    assert fake.calls[0]["symbol"] == "AAPL"


@pytest.mark.asyncio
async def test_unsupported_interval_raises_400():
    """Intervals outside the canonical table are rejected up front."""
    cc = CrossClient(_FakeRegistry({}), env="mainnet")
    with pytest.raises(HTTPException) as exc_info:
        await cc.get_historical(
            symbol="BTC", asset_class="crypto", interval="3h",
            start_date="2026-05-09T00:00:00", end_date="2026-05-10T00:00:00",
        )
    assert exc_info.value.status_code == 400
|
||||||
@@ -0,0 +1,90 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from cerbero_mcp.exchanges.cross.consensus import merge_candles
|
||||||
|
|
||||||
|
|
||||||
|
def _c(ts, o, h, l, c, v):
    """Build a raw candle dict with explicit OHLCV values."""
    return {"timestamp": ts, "open": o, "high": h, "low": l, "close": c, "volume": v}


def test_empty_input():
    """No sources → empty consensus series."""
    assert merge_candles({}) == []


def test_single_source_passthrough():
    """A lone source passes through with sources=1 and zero divergence."""
    out = merge_candles({"bybit": [_c(1, 100, 110, 90, 105, 5)]})
    assert len(out) == 1
    assert out[0]["timestamp"] == 1
    assert out[0]["close"] == 105
    assert out[0]["sources"] == 1
    assert out[0]["div_pct"] == 0.0


def test_three_sources_identical_no_divergence():
    """Identical bars across three sources agree fully (div_pct == 0)."""
    src = {
        "bybit": [_c(1, 100, 110, 90, 105, 5)],
        "hyperliquid": [_c(1, 100, 110, 90, 105, 3)],
        "deribit": [_c(1, 100, 110, 90, 105, 7)],
    }
    out = merge_candles(src)
    assert len(out) == 1
    assert out[0]["close"] == 105.0
    assert out[0]["sources"] == 3
    assert out[0]["div_pct"] == 0.0
    # volume is mean across sources
    assert abs(out[0]["volume"] - 5.0) < 1e-9


def test_three_sources_divergent_close():
    """Divergent closes: consensus is the median, div_pct the close spread."""
    src = {
        "bybit": [_c(1, 100, 110, 90, 100, 1)],
        "hyperliquid": [_c(1, 100, 110, 90, 110, 1)],
        "deribit": [_c(1, 100, 110, 90, 105, 1)],
    }
    out = merge_candles(src)
    # median of [100, 110, 105] = 105
    assert out[0]["close"] == 105.0
    # div_pct = (110 - 100) / 105 ≈ 0.0952
    assert abs(out[0]["div_pct"] - 10 / 105) < 1e-6
    assert out[0]["sources"] == 3


def test_misaligned_timestamps():
    """Bars present on only some sources still appear, with lower counts."""
    src = {
        "bybit": [_c(1, 100, 110, 90, 105, 1), _c(2, 100, 110, 90, 105, 1)],
        "hyperliquid": [_c(2, 100, 110, 90, 105, 1), _c(3, 100, 110, 90, 105, 1)],
    }
    out = merge_candles(src)
    timestamps = [c["timestamp"] for c in out]
    sources_by_ts = {c["timestamp"]: c["sources"] for c in out}
    assert timestamps == [1, 2, 3]
    assert sources_by_ts == {1: 1, 2: 2, 3: 1}


def test_two_sources_even_median():
    """statistics.median of an even count is the mean of the middle two."""
    src = {
        "bybit": [_c(1, 100, 110, 90, 100, 1)],
        "hyperliquid": [_c(1, 100, 110, 90, 110, 1)],
    }
    out = merge_candles(src)
    # even median = mean of two = 105
    assert out[0]["close"] == 105.0


def test_empty_source_ignored():
    """A source that returned no candles contributes nothing."""
    src = {
        "bybit": [_c(1, 100, 110, 90, 105, 1)],
        "hyperliquid": [],
    }
    out = merge_candles(src)
    assert len(out) == 1
    assert out[0]["sources"] == 1


def test_output_sorted_by_timestamp():
    """Output ordering is by timestamp, not input order."""
    src = {
        "bybit": [_c(3, 100, 110, 90, 105, 1), _c(1, 100, 110, 90, 105, 1),
                  _c(2, 100, 110, 90, 105, 1)],
    }
    out = merge_candles(src)
    assert [c["timestamp"] for c in out] == [1, 2, 3]
|
||||||
@@ -0,0 +1,47 @@
|
|||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
from cerbero_mcp.exchanges.cross.symbol_map import (
|
||||||
|
get_sources,
|
||||||
|
to_native_interval,
|
||||||
|
to_native_symbol,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def test_btc_crypto_sources():
    """BTC routes across all three crypto venues."""
    assert set(get_sources("crypto", "BTC")) == {"bybit", "hyperliquid", "deribit"}


def test_eth_crypto_sources():
    """ETH routes across all three crypto venues."""
    assert set(get_sources("crypto", "ETH")) == {"bybit", "hyperliquid", "deribit"}


def test_unknown_crypto_symbol_returns_empty():
    """Unknown symbols yield an empty source list rather than raising."""
    assert get_sources("crypto", "DOGEFAKE") == []


def test_stocks_aapl_sources():
    """Equities route to Alpaca only in the MVP."""
    assert set(get_sources("stocks", "AAPL")) == {"alpaca"}


def test_native_symbol_btc():
    """Canonical BTC maps to each venue's native market symbol."""
    assert to_native_symbol("crypto", "BTC", "bybit") == "BTCUSDT"
    assert to_native_symbol("crypto", "BTC", "hyperliquid") == "BTC"
    assert to_native_symbol("crypto", "BTC", "deribit") == "BTC-PERPETUAL"


def test_native_symbol_unsupported_pair_raises():
    """A venue that doesn't serve the symbol raises KeyError."""
    with pytest.raises(KeyError):
        to_native_symbol("crypto", "BTC", "alpaca")


def test_native_interval_1h():
    """Canonical 1h maps to each venue's native interval token."""
    assert to_native_interval("1h", "bybit") == "60"
    assert to_native_interval("1h", "hyperliquid") == "1h"
    assert to_native_interval("1h", "deribit") == "1h"
    assert to_native_interval("1h", "alpaca") == "1h"


def test_native_interval_unknown_canonical_raises():
    """Unknown canonical intervals raise KeyError from the table lookup."""
    with pytest.raises(KeyError):
        to_native_interval("3h", "bybit")
|
||||||
Reference in New Issue
Block a user