feat: import 6 MCP services + common workspace
This commit is contained in:
@@ -0,0 +1,23 @@
|
||||
# Packaging metadata for the shared MCP workspace library.
[project]
name = "option-mcp-common"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
    # Web framework + ASGI server for the REST/MCP endpoints.
    "fastapi>=0.115",
    "uvicorn[standard]>=0.30",
    # Model Context Protocol SDK.
    "mcp>=1.0",
    # Async HTTP client used by the MCP bridge proxy handlers.
    "httpx>=0.27",
    # Data models and settings management.
    "pydantic>=2.6",
    "pydantic-settings>=2.3",
    # Structured JSON logging formatter.
    "python-json-logger>=2.0",
]

[project.optional-dependencies]
# Development-only tooling (tests + lint).
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30", "ruff>=0.5"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
# src-layout: the importable package lives under src/.
packages = ["src/option_mcp_common"]
|
||||
@@ -0,0 +1,19 @@
|
||||
from option_mcp_common.models import (
|
||||
Event,
|
||||
EventPriority,
|
||||
EventType,
|
||||
L1State,
|
||||
L2Entry,
|
||||
L3Entry,
|
||||
UserInstruction,
|
||||
)
|
||||
|
||||
# Public re-export surface of the package: star-imports and the documented
# API resolve against this list (models re-exported from .models).
__all__ = [
    "L1State",
    "L2Entry",
    "L3Entry",
    "Event",
    "EventPriority",
    "EventType",
    "UserInstruction",
]
|
||||
@@ -0,0 +1,98 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from collections.abc import Callable
|
||||
from dataclasses import dataclass, field
|
||||
from functools import wraps
|
||||
|
||||
from fastapi import HTTPException, Request, status
|
||||
|
||||
|
||||
@dataclass
class Principal:
    """An authenticated caller identity resolved from a bearer token."""

    # Human-readable identity label (e.g. "core", "observer").
    name: str
    # Capability strings granted to this principal; checked by acl_requires().
    capabilities: set[str] = field(default_factory=set)
|
||||
|
||||
|
||||
@dataclass
class TokenStore:
    """Lookup table mapping raw bearer tokens to resolved principals."""

    # Raw token string -> Principal holding that token's capabilities.
    tokens: dict[str, Principal]

    def get(self, token: str) -> Principal | None:
        """Return the principal for *token*, or None when unknown."""
        try:
            return self.tokens[token]
        except KeyError:
            return None
|
||||
|
||||
|
||||
def require_principal(request: Request) -> Principal:
    """FastAPI dependency: resolve the caller's Principal from the request.

    Reads the `Authorization: Bearer <token>` header and looks the token up
    in the app-wide TokenStore stored on `request.app.state.token_store`.

    Raises:
        HTTPException 401: header missing or not using the Bearer scheme.
        HTTPException 403: token present but unknown to the store.
    """
    auth = request.headers.get("Authorization", "")
    if not auth.startswith("Bearer "):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "missing bearer token")
    token = auth[len("Bearer "):].strip()
    store: TokenStore = request.app.state.token_store
    principal = store.get(token)
    if principal is None:
        raise HTTPException(status.HTTP_403_FORBIDDEN, "invalid token")
    return principal
|
||||
|
||||
|
||||
def acl_requires(*, core: bool = False, observer: bool = False) -> Callable:
    """Decorator: require at least one matching capability.

    The wrapped endpoint must receive a Principal either as the `principal`
    keyword argument or as any positional argument. Raises HTTPException 403
    when no principal is found or none of its capabilities intersect the
    allowed set.

    Args:
        core: allow principals holding the "core" capability.
        observer: allow principals holding the "observer" capability.
    """
    allowed: set[str] = set()
    if core:
        allowed.add("core")
    if observer:
        allowed.add("observer")

    def _check(args: tuple, kwargs: dict) -> None:
        # Shared capability check (was duplicated in both wrappers):
        # find the Principal among kwargs first, then positional args.
        principal = kwargs.get("principal")
        if principal is None:
            principal = next((a for a in args if isinstance(a, Principal)), None)
        if principal is None or not (principal.capabilities & allowed):
            raise HTTPException(
                status.HTTP_403_FORBIDDEN,
                f"capability required: {allowed}",
            )

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def async_wrapper(*args, **kwargs):
            _check(args, kwargs)
            # async_wrapper is only selected when func is a coroutine
            # function (see the return below), so func(...) is always
            # awaitable here — the old per-call _is_coro branch was dead
            # code and would have returned an un-awaited value if taken.
            return await func(*args, **kwargs)

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            _check(args, kwargs)
            return func(*args, **kwargs)

        return async_wrapper if _is_coro(func) else sync_wrapper

    return decorator
|
||||
|
||||
|
||||
def _is_coro(func: Callable) -> bool:
    """Return True when *func* is an async (coroutine) function.

    Uses inspect rather than asyncio: asyncio.iscoroutinefunction is
    deprecated since Python 3.12 in favor of the inspect variant.
    """
    import inspect

    return inspect.iscoroutinefunction(func)
|
||||
|
||||
|
||||
def load_token_store_from_files(
    core_token_file: str | None,
    observer_token_file: str | None,
) -> TokenStore:
    """Build a TokenStore by reading one token per role from disk files.

    Each file is expected to contain a single token (surrounding whitespace
    is stripped). A None/empty path skips that role.
    """
    tokens: dict[str, Principal] = {}
    role_files = (
        ("core", core_token_file),
        ("observer", observer_token_file),
    )
    for role, path in role_files:
        if not path:
            continue
        with open(path) as fh:
            token = fh.read().strip()
        # Capability name matches the role name for both entries.
        tokens[token] = Principal(name=role, capabilities={role})
    return TokenStore(tokens=tokens)
|
||||
@@ -0,0 +1,80 @@
|
||||
"""CER-P5-010: env validation policy — fail-fast per mandatory, soft per optional.
|
||||
|
||||
Usage al boot di ogni mcp `__main__.py`:
|
||||
|
||||
from option_mcp_common.env_validation import require_env, optional_env, summarize
|
||||
|
||||
creds_file = require_env("CREDENTIALS_FILE", "deribit credentials JSON path")
|
||||
host = optional_env("HOST", default="0.0.0.0")
|
||||
summarize(["CREDENTIALS_FILE", "HOST", "PORT"])
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MissingEnvError(RuntimeError):
    """Raised by require_env when a mandatory env var is absent or empty."""
|
||||
|
||||
|
||||
def require_env(name: str, description: str = "") -> str:
    """Fail fast: return the value of env var *name*, stripped.

    Logs a clear error and raises MissingEnvError when the variable is
    unset or blank. Callers in a service main() are expected to let this
    abort boot (see fail_fast_if_missing for the exit-code-2 variant);
    this function itself only raises — it never exits the process.

    Args:
        name: environment variable name.
        description: optional human hint appended to the error message.

    Raises:
        MissingEnvError: when the variable is unset or whitespace-only.
    """
    val = (os.environ.get(name) or "").strip()
    if not val:
        msg = f"missing mandatory env var: {name}"
        if description:
            msg += f" ({description})"
        logger.error(msg)
        raise MissingEnvError(msg)
    return val
|
||||
|
||||
|
||||
def optional_env(name: str, *, default: str = "") -> str:
    """Soft lookup: return env var *name* (stripped) or *default*.

    Logs at INFO when a non-empty default is substituted.
    """
    value = (os.environ.get(name) or "").strip()
    if value:
        return value
    if default:
        logger.info("env %s not set, using default=%r", name, default)
    return default
|
||||
|
||||
|
||||
def summarize(names: list[str]) -> None:
    """INFO-log the presence of each env var in *names*.

    Values whose name looks sensitive (SECRET/KEY/TOKEN/PASSWORD/
    CREDENTIAL/WALLET) are masked: only the character count is logged.
    """
    sensitive_tokens = ("SECRET", "KEY", "TOKEN", "PASSWORD", "CREDENTIAL", "WALLET")
    for var_name in names:
        value = os.environ.get(var_name)
        if value is None:
            logger.info("env[%s]: <unset>", var_name)
        elif any(tok in var_name.upper() for tok in sensitive_tokens):
            logger.info("env[%s]: <set, %d chars>", var_name, len(value))
        else:
            logger.info("env[%s]: %s", var_name, value)
|
||||
|
||||
|
||||
def fail_fast_if_missing(names: list[str]) -> None:
    """Abort boot (exit code 2) unless every env var in *names* is set.

    Preferred use: early call in main() to block boot on incomplete
    config. Logs and prints the full list of missing variables to stderr,
    then terminates the process via sys.exit(2).
    """
    missing = [n for n in names if not (os.environ.get(n) or "").strip()]
    if not missing:
        return
    logger.error("boot aborted: missing mandatory env vars: %s", missing)
    print(
        f"FATAL: missing mandatory env vars: {missing}",
        file=sys.stderr,
    )
    sys.exit(2)
|
||||
@@ -0,0 +1,139 @@
|
||||
from __future__ import annotations
|
||||
|
||||
|
||||
def sma(values: list[float], period: int) -> float | None:
    """Simple moving average of the last *period* values.

    Returns None when *period* is not positive (the original divided by
    zero for period == 0) or when there is not enough data.
    """
    if period <= 0 or len(values) < period:
        return None
    return sum(values[-period:]) / period
|
||||
|
||||
|
||||
def rsi(closes: list[float], period: int = 14) -> float | None:
    """Relative Strength Index (Wilder smoothing) over *closes*.

    Returns None when fewer than period + 1 closes are available;
    100.0 when the average loss is zero.
    """
    if len(closes) < period + 1:
        return None
    # Split each close-to-close delta into gain/loss magnitudes.
    gains = [max(b - a, 0.0) for a, b in zip(closes, closes[1:])]
    losses = [max(a - b, 0.0) for a, b in zip(closes, closes[1:])]
    # Seed with the plain average of the first window, then apply
    # Wilder's recursive smoothing over the remainder.
    avg_gain = sum(gains[:period]) / period
    avg_loss = sum(losses[:period]) / period
    for gain, loss in zip(gains[period:], losses[period:]):
        avg_gain = (avg_gain * (period - 1) + gain) / period
        avg_loss = (avg_loss * (period - 1) + loss) / period
    if avg_loss == 0:
        return 100.0
    return 100.0 - (100.0 / (1.0 + avg_gain / avg_loss))
|
||||
|
||||
|
||||
def _ema_series(values: list[float], period: int) -> list[float]:
    """EMA series seeded with the SMA of the first *period* values.

    Returns [] when there is not enough data. Output index 0 corresponds
    to input index period - 1.
    """
    if len(values) < period:
        return []
    alpha = 2.0 / (period + 1)
    ema = sum(values[:period]) / period
    series = [ema]
    for value in values[period:]:
        # Equivalent to ema + alpha * (value - ema).
        ema += alpha * (value - ema)
        series.append(ema)
    return series
|
||||
|
||||
|
||||
def macd(
    closes: list[float],
    fast: int = 12,
    slow: int = 26,
    signal: int = 9,
) -> dict[str, float | None]:
    """MACD of *closes*: last values of the line, signal and histogram.

    Returns {"macd": None, "signal": None, "hist": None} when there is not
    enough data for the slow EMA plus the signal EMA warm-up.
    """
    nothing: dict[str, float | None] = {"macd": None, "signal": None, "hist": None}
    if len(closes) < slow + signal:
        return nothing
    ema_fast = _ema_series(closes, fast)
    ema_slow = _ema_series(closes, slow)
    # _ema_series output index 0 maps to input index period - 1, so the
    # fast series starts (slow - fast) entries earlier than the slow one;
    # drop that prefix to align both series on the same close index.
    offset = slow - fast
    aligned_fast = ema_fast[offset:]
    macd_line = [f - s for f, s in zip(aligned_fast, ema_slow, strict=False)]
    if len(macd_line) < signal:
        return nothing
    signal_line = _ema_series(macd_line, signal)
    if not signal_line:
        return nothing
    last_macd = macd_line[-1]
    last_sig = signal_line[-1]
    return {
        "macd": last_macd,
        "signal": last_sig,
        "hist": last_macd - last_sig,
    }
|
||||
|
||||
|
||||
def atr(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> float | None:
    """Average True Range (Wilder smoothing).

    Returns None when fewer than period + 1 closes are available.
    """
    if len(closes) < period + 1:
        return None
    true_ranges: list[float] = []
    for i in range(1, len(closes)):
        prev_close = closes[i - 1]
        # True range: widest of bar range and the gaps vs. previous close.
        true_ranges.append(
            max(
                highs[i] - lows[i],
                abs(highs[i] - prev_close),
                abs(lows[i] - prev_close),
            )
        )
    if len(true_ranges) < period:
        return None
    smoothed = sum(true_ranges[:period]) / period
    for tr in true_ranges[period:]:
        smoothed = (smoothed * (period - 1) + tr) / period
    return smoothed
|
||||
|
||||
|
||||
def adx(
    highs: list[float],
    lows: list[float],
    closes: list[float],
    period: int = 14,
) -> dict[str, float | None]:
    """Average Directional Index with +DI/-DI (Wilder smoothing).

    Returns {"adx": None, "+di": None, "-di": None} when fewer than
    2 * period + 1 bars are available (one period to seed the smoothed
    sums, another to seed the ADX average of DX values).
    """
    nothing: dict[str, float | None] = {"adx": None, "+di": None, "-di": None}
    if len(closes) < 2 * period + 1:
        return nothing
    trs: list[float] = []
    plus_dms: list[float] = []
    minus_dms: list[float] = []
    for i in range(1, len(closes)):
        # True range: widest of bar range and gaps vs. previous close.
        tr = max(
            highs[i] - lows[i],
            abs(highs[i] - closes[i - 1]),
            abs(lows[i] - closes[i - 1]),
        )
        up = highs[i] - highs[i - 1]
        dn = lows[i - 1] - lows[i]
        # Directional movement: only the dominant positive move counts.
        plus_dm = up if (up > dn and up > 0) else 0.0
        minus_dm = dn if (dn > up and dn > 0) else 0.0
        trs.append(tr)
        plus_dms.append(plus_dm)
        minus_dms.append(minus_dm)

    # Wilder smoothed sums, seeded with plain sums of the first period.
    atr_s = sum(trs[:period])
    pdm_s = sum(plus_dms[:period])
    mdm_s = sum(minus_dms[:period])
    dxs: list[float] = []
    pdi = mdi = 0.0
    for i in range(period, len(trs)):
        atr_s = atr_s - atr_s / period + trs[i]
        pdm_s = pdm_s - pdm_s / period + plus_dms[i]
        mdm_s = mdm_s - mdm_s / period + minus_dms[i]
        # Directional indicators; guarded against a zero true-range sum.
        pdi = 100.0 * pdm_s / atr_s if atr_s else 0.0
        mdi = 100.0 * mdm_s / atr_s if atr_s else 0.0
        s = pdi + mdi
        dx = 100.0 * abs(pdi - mdi) / s if s else 0.0
        dxs.append(dx)

    if len(dxs) < period:
        return nothing
    # ADX: Wilder-smoothed average of the DX series; pdi/mdi hold the
    # latest directional indicator values from the loop above.
    adx_val = sum(dxs[:period]) / period
    for i in range(period, len(dxs)):
        adx_val = (adx_val * (period - 1) + dxs[i]) / period
    return {"adx": adx_val, "+di": pdi, "-di": mdi}
|
||||
@@ -0,0 +1,81 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
|
||||
# pythonjsonlogger rinominato in .json; keep fallback per compat
|
||||
try:
|
||||
from pythonjsonlogger.json import JsonFormatter as _JsonFormatter # noqa: N813
|
||||
except ImportError:
|
||||
from pythonjsonlogger.jsonlogger import JsonFormatter as _JsonFormatter # noqa: N813
|
||||
|
||||
# (pattern, replacement) pairs applied to every log message to mask secrets.
SECRET_PATTERNS = [
    (re.compile(r"Bearer\s+[\w\-\._]+", re.IGNORECASE), "Bearer ***"),
    (re.compile(r'("api_key"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("password"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("private_key"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("client_secret"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r"sk-[\w]{20,}"), "sk-***"),
]


class SecretsFilter(logging.Filter):
    """Logging filter that redacts known secret shapes from messages.

    The record's message is rendered once (args applied), scrubbed against
    SECRET_PATTERNS, and written back; args are cleared so handlers do not
    attempt to re-format the already-rendered message.
    """

    def filter(self, record: logging.LogRecord) -> bool:
        rendered = record.getMessage()
        for pattern, replacement in SECRET_PATTERNS:
            rendered = pattern.sub(replacement, rendered)
        record.msg = rendered
        record.args = ()  # message is fully rendered; prevent re-formatting
        return True
|
||||
|
||||
|
||||
def get_json_logger(name: str, level: int = logging.INFO) -> logging.Logger:
    """Return a non-propagating logger emitting JSON lines on stderr.

    Idempotent: a logger that already has handlers is returned untouched.
    A SecretsFilter is attached so secrets never reach the output.
    """
    log = logging.getLogger(name)
    if log.handlers:
        # Already configured by a previous call — don't stack handlers.
        return log
    log.setLevel(level)
    stream = logging.StreamHandler(sys.stderr)
    stream.setFormatter(
        _JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s")
    )
    stream.addFilter(SecretsFilter())
    log.addHandler(stream)
    log.propagate = False
    return log
|
||||
|
||||
|
||||
def configure_root_logging(
    *,
    level: str | int | None = None,
    format_type: str | None = None,
) -> None:
    """CER-P5-009: configure the root logger with a JSON or text formatter.

    Env overrides (used when the corresponding argument is None):
      - LOG_LEVEL (default INFO)
      - LOG_FORMAT=json|text (default json — production-ready structured log)

    Applies SecretsFilter in both formats.
    """
    lvl_raw = level if level is not None else os.environ.get("LOG_LEVEL", "INFO")
    # NOTE(review): getLevelName returns the string "Level <name>" for an
    # unknown level name, which root.setLevel() below would reject —
    # assumes LOG_LEVEL is always a valid level name; confirm callers.
    lvl = logging.getLevelName(lvl_raw.upper()) if isinstance(lvl_raw, str) else lvl_raw
    fmt = (format_type or os.environ.get("LOG_FORMAT") or "json").lower()

    root = logging.getLogger()
    # Remove existing handlers (basicConfig would otherwise leave duplicates).
    for h in list(root.handlers):
        root.removeHandler(h)

    handler = logging.StreamHandler(sys.stderr)
    if fmt == "json":
        handler.setFormatter(
            _JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s")
        )
    else:
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
        )
    handler.addFilter(SecretsFilter())
    root.addHandler(handler)
    root.setLevel(lvl)
|
||||
@@ -0,0 +1,239 @@
|
||||
"""Bridge MCP → endpoint REST esistenti.
|
||||
|
||||
Implementa manualmente JSON-RPC 2.0 MCP su `POST /mcp` (no SSE, risposta
|
||||
diretta in body JSON). Supporta:
|
||||
- initialize
|
||||
- notifications/initialized
|
||||
- tools/list
|
||||
- tools/call
|
||||
|
||||
Claude Code config esempio:
|
||||
|
||||
{
|
||||
"mcpServers": {
|
||||
"cerbero-memory": {
|
||||
"type": "http",
|
||||
"url": "http://localhost:8080/mcp-memory/mcp",
|
||||
"headers": {"Authorization": "Bearer <observer-token>"}
|
||||
}
|
||||
}
|
||||
}
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from fastapi import FastAPI, Request
|
||||
from fastapi.responses import JSONResponse
|
||||
|
||||
from option_mcp_common.auth import TokenStore
|
||||
|
||||
MCP_PROTOCOL_VERSION = "2024-11-05"
|
||||
|
||||
|
||||
def _derive_input_schemas(app: FastAPI, tool_names: list[str]) -> dict[str, dict]:
    """Extract the Pydantic body JSON schema for each POST /tools/<name> route.

    Resolves lazy annotations (PEP 563) via `typing.get_type_hints`.
    Returns a {tool_name: json_schema} mapping. Routes without a Pydantic
    body, or whose annotations cannot be resolved, are skipped: the caller
    falls back to a permissive schema.
    """
    import typing
    from pydantic import BaseModel

    names_set = set(tool_names)
    out: dict[str, dict] = {}
    for route in app.routes:
        path = getattr(route, "path", "")
        if not path.startswith("/tools/"):
            continue
        name = path[len("/tools/"):]
        if name not in names_set:
            continue
        endpoint = getattr(route, "endpoint", None)
        if endpoint is None:
            continue
        try:
            hints = typing.get_type_hints(endpoint)
        except Exception:
            # Unresolvable forward refs — skip; caller uses fallback schema.
            continue
        for pname, ann in hints.items():
            if pname == "return":
                continue
            # The first BaseModel-typed parameter is treated as the body model.
            if isinstance(ann, type) and issubclass(ann, BaseModel):
                try:
                    out[name] = ann.model_json_schema()
                except Exception:
                    # Schema generation failed — fall back silently.
                    pass
                break
    return out
|
||||
|
||||
|
||||
def _make_proxy_handler(internal_base_url: str, tool_name: str, token: str):
    """Build an async handler proxying an MCP tools/call to the REST endpoint.

    The returned coroutine POSTs the MCP arguments to
    {internal_base_url}/tools/{tool_name}, forwarding the caller's bearer
    token so the REST-level ACLs still apply.

    Raises:
        RuntimeError: on any HTTP status >= 400 (upstream body truncated
            to 500 chars in the message).
    """
    async def handler(args: dict | None) -> Any:
        async with httpx.AsyncClient(timeout=30.0) as c:
            r = await c.post(
                f"{internal_base_url}/tools/{tool_name}",
                headers={"Authorization": f"Bearer {token}"} if token else {},
                json=args or {},
            )
            if r.status_code >= 400:
                raise RuntimeError(
                    f"tool {tool_name} failed: HTTP {r.status_code} — {r.text[:500]}"
                )
            try:
                return r.json()
            except Exception:
                # Non-JSON upstream body — wrap the raw text so the caller
                # still receives a serializable result.
                return {"raw": r.text}

    return handler
|
||||
|
||||
|
||||
def mount_mcp_endpoint(
    app: FastAPI,
    *,
    name: str,
    version: str,
    token_store: TokenStore,
    internal_base_url: str,
    tools: list[dict],
) -> None:
    """Register an MCP JSON-RPC 2.0 endpoint on POST /mcp.

    Each tool is proxied to POST {internal_base_url}/tools/<name> with the
    MCP client's bearer token (preserving the existing REST ACLs).

    Args:
        app: the service's FastAPI instance
        name: MCP server name
        version: service version
        token_store: the same store used by the REST tools
        internal_base_url: internal base URL (e.g. "http://localhost:9015")
        tools: list of {"name": str, "description": str, "input_schema"?: dict}
    """
    tools_by_name = {t["name"]: t for t in tools}

    # Auto-derive input schemas from FastAPI routes (Pydantic body models).
    # Lets the LLM know the required parameter names instead of guessing
    # them. An explicit `input_schema` on a tool wins over the auto-derive.
    derived_schemas = _derive_input_schemas(app, [t["name"] for t in tools])

    def _tool_defs() -> list[dict]:
        # Build MCP tool descriptors; fall back to a permissive object
        # schema when nothing better is known.
        defs = []
        for t in tools:
            schema = t.get("input_schema") or derived_schemas.get(t["name"]) or {
                "type": "object",
                "additionalProperties": True,
            }
            defs.append({
                "name": t["name"],
                "description": t.get("description", t["name"]),
                "inputSchema": schema,
            })
        return defs

    async def _handle_rpc(body: dict, token: str) -> dict | None:
        # Dispatch one JSON-RPC request; returns None for notifications.
        rpc_id = body.get("id")
        method = body.get("method")
        params = body.get("params") or {}

        # Notification (no id) → no response
        if method == "notifications/initialized":
            return None

        if method == "initialize":
            return {
                "jsonrpc": "2.0",
                "id": rpc_id,
                "result": {
                    "protocolVersion": MCP_PROTOCOL_VERSION,
                    "capabilities": {"tools": {"listChanged": False}},
                    "serverInfo": {"name": name, "version": version},
                },
            }

        if method == "tools/list":
            return {
                "jsonrpc": "2.0",
                "id": rpc_id,
                "result": {"tools": _tool_defs()},
            }

        if method == "tools/call":
            tool_name = params.get("name", "")
            args = params.get("arguments") or {}
            if tool_name not in tools_by_name:
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "error": {"code": -32601, "message": f"tool non trovato: {tool_name}"},
                }
            handler = _make_proxy_handler(internal_base_url, tool_name, token)
            try:
                result = await handler(args)
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "result": {
                        "content": [
                            {
                                "type": "text",
                                "text": _to_text(result),
                            }
                        ],
                        "isError": False,
                    },
                }
            except Exception as e:
                # Tool failures are reported MCP-style (isError=True), not
                # as JSON-RPC protocol errors.
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "result": {
                        "content": [{"type": "text", "text": str(e)}],
                        "isError": True,
                    },
                }

        return {
            "jsonrpc": "2.0",
            "id": rpc_id,
            "error": {"code": -32601, "message": f"metodo non supportato: {method}"},
        }

    @app.post("/mcp")
    async def mcp_entry(request: Request):
        # Bearer auth against the shared TokenStore before any dispatch.
        auth = request.headers.get("Authorization", "")
        if not auth.startswith("Bearer "):
            return JSONResponse({"error": "missing bearer token"}, status_code=401)
        token = auth[len("Bearer "):].strip()
        principal = token_store.get(token)
        if principal is None:
            return JSONResponse({"error": "invalid token"}, status_code=403)

        body = await request.json()

        # Batch support
        if isinstance(body, list):
            results = []
            for item in body:
                resp = await _handle_rpc(item, token)
                if resp is not None:
                    results.append(resp)
            return JSONResponse(results)

        resp = await _handle_rpc(body, token)
        if resp is None:
            # Notification (no id) → 204 no content
            return JSONResponse(None, status_code=204)
        return JSONResponse(resp)
|
||||
|
||||
|
||||
def _to_text(value: Any) -> str:
    """Render an arbitrary tool result as display text.

    Strings pass through unchanged; everything else is pretty-printed
    JSON, falling back to str() for non-serializable values.
    """
    import json

    if isinstance(value, str):
        return value
    try:
        rendered = json.dumps(value, ensure_ascii=False, indent=2)
    except Exception:
        rendered = str(value)
    return rendered
|
||||
@@ -0,0 +1,98 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from enum import StrEnum
|
||||
from functools import total_ordering
|
||||
from typing import Any
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
|
||||
@total_ordering
|
||||
class EventPriority(StrEnum):
|
||||
LOW = "low"
|
||||
NORMAL = "normal"
|
||||
HIGH = "high"
|
||||
CRITICAL = "critical"
|
||||
|
||||
def _rank(self) -> int:
|
||||
return ["low", "normal", "high", "critical"].index(self.value)
|
||||
|
||||
def __lt__(self, other: EventPriority) -> bool:
|
||||
return self._rank() < other._rank()
|
||||
|
||||
|
||||
class EventType(StrEnum):
    """Kind of event carried by an Event record."""

    ALERT = "alert"
    USER_INSTRUCTION = "user_instruction"
    SYSTEM = "system"
|
||||
|
||||
|
||||
class L1State(BaseModel):
    """Singleton row with current operational state."""

    # ISO timestamp of the last state refresh.
    updated_at: str
    equity_total: float | None = None
    # Per-exchange equity breakdown, keyed by exchange name.
    equity_by_exchange: dict[str, float] = Field(default_factory=dict)
    bias: str | None = None
    pnl_day: float | None = None
    pnl_total: float | None = None
    capital: float | None = None
    open_positions_count: int = 0
    # Aggregated greeks by name — presumably delta/gamma/etc.; confirm keys
    # against the writer.
    greeks_aggregate: dict[str, float] = Field(default_factory=dict)
    notes: str | None = None
|
||||
|
||||
|
||||
class L2Entry(BaseModel):
    """Reasoning entry — schema matches system_prompt v2.

    `authored_by_model`: identifies the LLM that generated the entry (e.g.
    "google/gemini-3-flash-preview" for core, "anthropic/claude-haiku-4-5"
    for a worker). None when written by a human operator via the UI.
    """

    timestamp: str
    setup: str
    tesi: str | None = None
    tesi_check: str | None = None
    invalidation: str | None = None
    esito: str
    scostamento: str | None = None
    scostamento_sigma: float | None = None
    lezione: str | None = None
    sizing_note: str | None = None
    run_id: str | None = None
    # Presumably links the entry to a UserInstruction id — verify against callers.
    user_instruction_id: int | None = None
    authored_by_model: str | None = None
|
||||
|
||||
|
||||
class L3Entry(BaseModel):
    """Compacted pattern distilled from a batch of L2 entries."""

    created_at: str
    category: str  # "pattern_errore" | "pattern_vincente" | "correlazione"
    summary: str
    # IDs of the L2 entries this pattern was compacted from.
    source_l2_ids: list[int]
|
||||
|
||||
|
||||
class Event(BaseModel):
    """Queued event with expiry and acknowledgement bookkeeping."""

    # None until assigned — presumably by the storage layer; confirm.
    id: int | None = None
    created_at: str
    expires_at: str
    type: EventType
    source: str
    priority: EventPriority
    # Free-form event payload.
    payload: dict[str, Any]
    # Acknowledgement fields: unset until the event is acked.
    acked_at: str | None = None
    ack_outcome: str | None = None
    ack_notes: str | None = None
|
||||
|
||||
|
||||
class UserInstruction(BaseModel):
    """Instruction issued by an operator (default source "observer")."""

    # None until assigned — presumably by the storage layer; confirm.
    id: int | None = None
    created_at: str
    text: str
    priority: EventPriority
    # When True the instruction must be explicitly acknowledged.
    require_ack: bool = True
    source: str = "observer"
    acked_at: str | None = None
    ack_outcome: str | None = None
||||
@@ -0,0 +1,92 @@
|
||||
"""CER-016 hard guard server-side su place_order.
|
||||
|
||||
Caps configurabili via env (default safety-first, mirati a ~200 EUR single,
|
||||
1000 EUR aggregato, 3x max leverage).
|
||||
|
||||
Thresholds sono numerici semplici — l'operatore stabilisce unità (EUR/USD)
|
||||
via env; il server compara su un unico campo `notional` in valore monetario.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import HTTPException
|
||||
|
||||
|
||||
def _env_float(name: str, default: float) -> float:
    """Read env var *name* as a float; *default* on unset/empty/unparsable."""
    raw = os.environ.get(name)
    if not raw:
        return default
    try:
        parsed = float(raw)
    except (TypeError, ValueError):
        return default
    return parsed
|
||||
|
||||
|
||||
def _env_int(name: str, default: int) -> int:
    """Read env var *name* as an int; *default* on unset/empty/unparsable."""
    raw = os.environ.get(name)
    if not raw:
        return default
    try:
        parsed = int(raw)
    except (TypeError, ValueError):
        return default
    return parsed
|
||||
|
||||
|
||||
def max_notional() -> float:
    # Hard cap (monetary units) on a single order's notional; default 200.
    return _env_float("CERBERO_MAX_NOTIONAL", 200.0)
|
||||
|
||||
|
||||
def max_aggregate() -> float:
    # Hard cap (monetary units) on total open notional; default 1000.
    return _env_float("CERBERO_MAX_AGGREGATE", 1000.0)
|
||||
|
||||
|
||||
def max_leverage() -> int:
    # Hard leverage cap; default 3x.
    return _env_int("CERBERO_MAX_LEVERAGE", 3)
|
||||
|
||||
|
||||
def _hard_reject(reason: str) -> None:
    """Raise HTTP 403 with the HARD_PROHIBITION envelope and current caps.

    Never returns: always raises HTTPException.
    """
    raise HTTPException(
        status_code=403,
        detail={
            "error": "HARD_PROHIBITION",
            "message": reason,
            # Echo the active caps so the caller can self-correct.
            "caps": {
                "max_notional": max_notional(),
                "max_aggregate": max_aggregate(),
                "max_leverage": max_leverage(),
            },
        },
    )
|
||||
|
||||
|
||||
def enforce_leverage(leverage: int | float | None) -> int:
    """Return the leverage to apply: the hard cap itself when None is given.

    Rejects (HTTP 403 via _hard_reject) values below 1 or above the cap.
    Note: a float input is truncated toward zero by int() before validation.
    """
    cap = max_leverage()
    if leverage is None:
        return cap
    lev = int(leverage)
    if lev < 1:
        _hard_reject(f"leverage must be >= 1 (got {lev})")
    if lev > cap:
        _hard_reject(f"leverage {lev}x exceeds hard cap {cap}x")
    return lev
|
||||
|
||||
|
||||
def enforce_single_notional(notional: float, *, exchange: str, instrument: str) -> None:
    """Reject (HTTP 403) when a single order's notional exceeds the cap.

    Args:
        notional: monetary value of the order.
        exchange: exchange name — used only in the rejection message.
        instrument: instrument name — used only in the rejection message.
    """
    cap = max_notional()
    if notional > cap:
        _hard_reject(
            f"{exchange}.{instrument} notional {notional:.2f} exceeds single trade cap {cap:.2f}"
        )
|
||||
|
||||
|
||||
def enforce_aggregate(current_total: float, new_notional: float) -> None:
    """Reject (HTTP 403) when adding *new_notional* to *current_total*
    would push the aggregate open notional over the cap."""
    cap = max_aggregate()
    total = current_total + new_notional
    if total > cap:
        _hard_reject(
            f"aggregate notional {total:.2f} (current {current_total:.2f} + new "
            f"{new_notional:.2f}) exceeds cap {cap:.2f}"
        )
|
||||
@@ -0,0 +1,220 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
import time
|
||||
import uuid
|
||||
from datetime import UTC, datetime
|
||||
|
||||
from collections.abc import Callable
|
||||
from contextlib import AbstractAsyncContextManager
|
||||
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
from fastapi.exceptions import RequestValidationError
|
||||
from fastapi.responses import JSONResponse, Response
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
|
||||
from option_mcp_common.auth import TokenStore
|
||||
|
||||
Lifespan = Callable[[FastAPI], AbstractAsyncContextManager[None]]
|
||||
|
||||
|
||||
def _error_envelope(
|
||||
*,
|
||||
type_: str,
|
||||
code: str,
|
||||
message: str,
|
||||
retryable: bool,
|
||||
suggested_fix: str | None = None,
|
||||
details: dict | None = None,
|
||||
request_id: str | None = None,
|
||||
) -> dict:
|
||||
env = {
|
||||
"error": {
|
||||
"type": type_,
|
||||
"code": code,
|
||||
"message": message,
|
||||
"retryable": retryable,
|
||||
},
|
||||
"request_id": request_id or uuid.uuid4().hex,
|
||||
"data_timestamp": datetime.now(UTC).isoformat(),
|
||||
}
|
||||
if suggested_fix:
|
||||
env["error"]["suggested_fix"] = suggested_fix
|
||||
if details:
|
||||
env["error"]["details"] = details
|
||||
return env
|
||||
|
||||
|
||||
class _TimestampInjectorMiddleware(BaseHTTPMiddleware):
    """CER-P5-001: inject data_timestamp into tool responses.

    - Dict response: body gains `data_timestamp` if missing.
    - List of dicts: each item gains `data_timestamp` if missing.
    - Header `X-Data-Timestamp` always present (universal, covers lists of
      primitives too).
    Skips /health (already populated), /mcp (JSON-RPC bridge) and non-JSON
    responses.
    """

    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)
        path = request.url.path
        # Only tool endpoints are decorated; everything else passes through.
        if not path.startswith("/tools/"):
            return response
        ctype = response.headers.get("content-type", "")
        if "application/json" not in ctype:
            return response
        # Drain the streaming body so it can be parsed and re-emitted.
        body = b""
        async for chunk in response.body_iterator:
            body += chunk
        ts = datetime.now(UTC).isoformat()
        try:
            data = json.loads(body) if body else None
        except Exception:
            # Unparsable JSON: pass the original bytes through, but still
            # advertise the timestamp in the header.
            headers = dict(response.headers)
            headers["X-Data-Timestamp"] = ts
            return Response(
                content=body,
                status_code=response.status_code,
                headers=headers,
                media_type=response.media_type,
            )

        modified = False
        if isinstance(data, dict) and "data_timestamp" not in data:
            data["data_timestamp"] = ts
            modified = True
        elif isinstance(data, list):
            for item in data:
                if isinstance(item, dict) and "data_timestamp" not in item:
                    item["data_timestamp"] = ts
                    modified = True

        headers = dict(response.headers)
        headers["X-Data-Timestamp"] = ts
        if modified:
            # Body length changed: drop content-length so Response recomputes it.
            new_body = json.dumps(data, default=str).encode()
            headers.pop("content-length", None)
            return Response(
                content=new_body,
                status_code=response.status_code,
                headers=headers,
                media_type="application/json",
            )
        return Response(
            content=body,
            status_code=response.status_code,
            headers=headers,
            media_type=response.media_type,
        )
|
||||
|
||||
|
||||
def build_app(
    *,
    name: str,
    version: str,
    token_store: TokenStore,
    lifespan: Lifespan | None = None,
) -> FastAPI:
    """Create a FastAPI app pre-wired with the shared MCP service plumbing.

    Installs, in order: the CER-P5-001 timestamp-injection middleware, an
    ``X-Duration-Ms`` latency header, global exception handlers emitting the
    CER-P5-002 error envelope, and a ``/health`` endpoint.  ``root_path`` is
    read from the ROOT_PATH environment variable (for reverse-proxy mounts).
    """
    root_path = os.getenv("ROOT_PATH", "")
    app = FastAPI(title=name, version=version, root_path=root_path, lifespan=lifespan)
    # Shared state: auth token store and boot time (used for /health uptime).
    app.state.token_store = token_store
    app.state.boot_at = time.time()

    app.add_middleware(_TimestampInjectorMiddleware)

    @app.middleware("http")
    async def _latency_header(request: Request, call_next):
        # Measure wall-clock handler time and surface it to clients.
        t0 = time.perf_counter()
        response = await call_next(request)
        dur_ms = (time.perf_counter() - t0) * 1000
        response.headers["X-Duration-Ms"] = f"{dur_ms:.2f}"
        return response

    # CER-P5-002 error envelope: global exception handlers.
    @app.exception_handler(HTTPException)
    async def _http_exc(request: Request, exc: HTTPException):
        # Only transient statuses are marked retryable for the caller.
        retryable = exc.status_code in (408, 429, 502, 503, 504)
        code_map = {
            400: "BAD_REQUEST", 401: "UNAUTHORIZED", 403: "FORBIDDEN",
            404: "NOT_FOUND", 408: "TIMEOUT", 409: "CONFLICT",
            422: "VALIDATION_ERROR", 429: "RATE_LIMIT",
            500: "INTERNAL_ERROR", 502: "UPSTREAM_ERROR",
            503: "UNAVAILABLE", 504: "GATEWAY_TIMEOUT",
        }
        code = code_map.get(exc.status_code, f"HTTP_{exc.status_code}")
        message = "HTTP error"
        details: dict | None = None
        detail = exc.detail
        # Preserve rail-style detail {"error": "..", "message": ".."} as code
        if isinstance(detail, dict):
            if isinstance(detail.get("error"), str):
                code = detail["error"].upper()
            message = str(detail.get("message") or detail.get("error") or message)
            details = detail
        elif isinstance(detail, str):
            message = detail
        return JSONResponse(
            status_code=exc.status_code,
            content=_error_envelope(
                type_="http_error",
                code=code,
                message=message,
                retryable=retryable,
                details=details,
            ),
        )

    @app.exception_handler(RequestValidationError)
    async def _validation_exc(request: Request, exc: RequestValidationError):
        errs = exc.errors()
        first_loc = ".".join(str(x) for x in errs[0]["loc"]) if errs else "body"
        suggestion = (
            f"check field '{first_loc}': "
            + (errs[0]["msg"] if errs else "invalid input")
        )
        # Sanitize ctx values: pydantic v2 may put a ValueError in ctx['error'],
        # which is not JSON-serializable.  Reduce them to strings.
        safe_errs: list[dict] = []
        for e in errs[:5]:  # cap detail payload at the first 5 errors
            ne: dict = {}
            for k, v in e.items():
                if k == "ctx" and isinstance(v, dict):
                    ne[k] = {ck: str(cv) for ck, cv in v.items()}
                else:
                    ne[k] = v
            safe_errs.append(ne)
        return JSONResponse(
            status_code=422,
            content=_error_envelope(
                type_="validation_error",
                code="INVALID_INPUT",
                message=f"request body validation failed on {first_loc}",
                retryable=False,
                suggested_fix=suggestion,
                details={"errors": safe_errs},
            ),
        )

    @app.exception_handler(Exception)
    async def _unhandled(request: Request, exc: Exception):
        # Last-resort handler: never leak a traceback, always envelope.
        return JSONResponse(
            status_code=500,
            content=_error_envelope(
                type_="internal_error",
                code="UNHANDLED_EXCEPTION",
                message=f"{type(exc).__name__}: {str(exc)[:300]}",
                retryable=True,
            ),
        )

    @app.get("/health")
    def health():
        # Liveness probe; data_timestamp keeps parity with tool responses.
        return {
            "status": "healthy",
            "name": name,
            "version": version,
            "uptime_seconds": int(time.time() - app.state.boot_at),
            "data_timestamp": datetime.now(UTC).isoformat(),
        }

    return app
|
||||
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import sqlite3
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@dataclass
class Database:
    """Thin wrapper around a SQLite file with WAL-mode defaults."""

    path: Path
    conn: sqlite3.Connection | None = None

    def connect(self) -> sqlite3.Connection:
        """Open (and remember) a connection, creating parent dirs as needed."""
        self.path.parent.mkdir(parents=True, exist_ok=True)
        conn = sqlite3.connect(
            str(self.path),
            isolation_level=None,  # autocommit mode
            check_same_thread=False,
        )
        conn.row_factory = sqlite3.Row
        for pragma in ("journal_mode=WAL", "synchronous=NORMAL", "foreign_keys=ON"):
            conn.execute(f"PRAGMA {pragma}")
        self.conn = conn
        return conn

    def close(self) -> None:
        """Close the connection if one is open; safe to call repeatedly."""
        if self.conn is None:
            return
        self.conn.close()
        self.conn = None
|
||||
|
||||
|
||||
def run_migrations(conn: sqlite3.Connection, migrations: dict[int, str]) -> None:
    """Idempotent migrations. `migrations` keys are monotonic version numbers."""
    # Bookkeeping table records which versions have already been applied.
    conn.execute(
        "CREATE TABLE IF NOT EXISTS _schema_version (version INTEGER PRIMARY KEY)"
    )
    row = conn.execute(
        "SELECT COALESCE(MAX(version), 0) FROM _schema_version"
    ).fetchone()
    applied_up_to = row[0]
    # Apply only the versions newer than what the database already has.
    pending = [v for v in sorted(migrations) if v > applied_up_to]
    for v in pending:
        conn.executescript(migrations[v])
        conn.execute("INSERT INTO _schema_version (version) VALUES (?)", (v,))
|
||||
@@ -0,0 +1,84 @@
|
||||
import pytest
|
||||
from fastapi import Depends, FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from option_mcp_common.auth import (
|
||||
Principal,
|
||||
TokenStore,
|
||||
acl_requires,
|
||||
require_principal,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
def token_store():
    """Two principals: one with the 'core' capability, one 'observer'."""
    principals = {
        "token-core-123": Principal(name="core", capabilities={"core"}),
        "token-obs-456": Principal(name="observer", capabilities={"observer"}),
    }
    return TokenStore(tokens=principals)


@pytest.fixture
def app(token_store):
    """Minimal app with public, authenticated and ACL-gated routes."""
    test_app = FastAPI()
    test_app.state.token_store = token_store

    @test_app.get("/public")
    def public():
        return {"ok": True}

    @test_app.get("/private")
    def private(principal: Principal = Depends(require_principal)):
        return {"name": principal.name}

    @test_app.post("/core-only")
    @acl_requires(core=True, observer=False)
    def core_only(principal: Principal = Depends(require_principal)):
        return {"who": principal.name}

    @test_app.post("/observer-only")
    @acl_requires(core=False, observer=True)
    def observer_only(principal: Principal = Depends(require_principal)):
        return {"who": principal.name}

    return test_app


def test_public_endpoint_no_auth(app):
    assert TestClient(app).get("/public").status_code == 200


def test_private_without_header_401(app):
    assert TestClient(app).get("/private").status_code == 401


def test_private_bad_token_403(app):
    resp = TestClient(app).get("/private", headers={"Authorization": "Bearer nope"})
    assert resp.status_code == 403


def test_private_good_token_200(app):
    resp = TestClient(app).get(
        "/private", headers={"Authorization": "Bearer token-core-123"}
    )
    assert resp.status_code == 200
    assert resp.json() == {"name": "core"}


def test_acl_core_token_on_core_only_endpoint(app):
    resp = TestClient(app).post(
        "/core-only", headers={"Authorization": "Bearer token-core-123"}
    )
    assert resp.status_code == 200


def test_acl_observer_on_core_only_rejected(app):
    resp = TestClient(app).post(
        "/core-only", headers={"Authorization": "Bearer token-obs-456"}
    )
    assert resp.status_code == 403


def test_acl_observer_on_observer_only_ok(app):
    resp = TestClient(app).post(
        "/observer-only", headers={"Authorization": "Bearer token-obs-456"}
    )
    assert resp.status_code == 200
|
||||
@@ -0,0 +1,71 @@
|
||||
"""CER-P5-010 env validation tests."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from option_mcp_common.env_validation import (
|
||||
MissingEnvError,
|
||||
fail_fast_if_missing,
|
||||
optional_env,
|
||||
require_env,
|
||||
summarize,
|
||||
)
|
||||
|
||||
|
||||
def test_require_env_present(monkeypatch):
    """A set, non-empty variable is returned verbatim."""
    monkeypatch.setenv("FOO_KEY", "value1")
    assert require_env("FOO_KEY") == "value1"


def test_require_env_missing_raises(monkeypatch):
    """An unset required variable raises MissingEnvError."""
    monkeypatch.delenv("MISSING_REQ", raising=False)
    with pytest.raises(MissingEnvError):
        require_env("MISSING_REQ", "critical path")


def test_require_env_empty_raises(monkeypatch):
    """An empty string counts as missing."""
    monkeypatch.setenv("EMPTY_REQ", "")
    with pytest.raises(MissingEnvError):
        require_env("EMPTY_REQ")


def test_require_env_whitespace_only_raises(monkeypatch):
    """Whitespace-only values count as missing."""
    monkeypatch.setenv("WS_REQ", " ")
    with pytest.raises(MissingEnvError):
        require_env("WS_REQ")


def test_optional_env_default(monkeypatch):
    """Unset optionals fall back to the provided default."""
    monkeypatch.delenv("OPT_A", raising=False)
    assert optional_env("OPT_A", default="fallback") == "fallback"


def test_optional_env_set(monkeypatch):
    """A set optional wins over the default."""
    monkeypatch.setenv("OPT_B", "xx")
    assert optional_env("OPT_B", default="fallback") == "xx"


def test_fail_fast_all_present(monkeypatch):
    """When everything is set, fail_fast_if_missing returns normally."""
    monkeypatch.setenv("AA", "1")
    monkeypatch.setenv("BB", "2")
    fail_fast_if_missing(["AA", "BB"])  # must not exit


def test_fail_fast_missing_exits(monkeypatch):
    """Any missing required variable triggers SystemExit(2)."""
    monkeypatch.setenv("HAVE_IT", "1")
    monkeypatch.delenv("MISSING_X", raising=False)
    with pytest.raises(SystemExit) as excinfo:
        fail_fast_if_missing(["HAVE_IT", "MISSING_X"])
    assert excinfo.value.code == 2


def test_summarize_does_not_leak_secrets(monkeypatch, caplog):
    """summarize() must mask secret-looking values but show benign ones."""
    import logging

    monkeypatch.setenv("API_KEY_FOO", "super-secret-token-123456")
    monkeypatch.setenv("PORT", "9000")
    with caplog.at_level(logging.INFO, logger="option_mcp_common.env_validation"):
        summarize(["API_KEY_FOO", "PORT", "NOT_SET_XYZ"])
    joined = "\n".join(caplog.messages)
    assert "super-secret-token-123456" not in joined
    assert "9000" in joined
    assert "<unset>" in joined
|
||||
@@ -0,0 +1,80 @@
|
||||
|
||||
from option_mcp_common.indicators import adx, atr, macd, rsi, sma
|
||||
|
||||
|
||||
def test_rsi_simple():
    """14-period RSI on the classic textbook series lands in the known band."""
    closes = [
        44, 44.34, 44.09, 44.15, 43.61, 44.33, 44.83, 45.10,
        45.42, 45.84, 46.08, 45.89, 46.03, 45.61, 46.28,
    ]
    value = rsi(closes, period=14)
    assert value is not None
    assert 65.0 < value < 75.0


def test_rsi_insufficient_data():
    """Fewer closes than the period → None."""
    assert rsi([1, 2, 3], period=14) is None


def test_sma_simple():
    """Exact-window mean; short input → None."""
    assert sma([1, 2, 3, 4, 5], period=5) == 3.0
    assert sma([1, 2, 3], period=5) is None


def test_atr_simple():
    """Constant 1-point ranges with 1-point gaps → ATR just above 1."""
    highs = [float(i) for i in range(10, 25)]
    lows = [float(i) for i in range(9, 24)]
    closes = [i + 0.5 for i in range(9, 24)]
    value = atr(highs, lows, closes, period=14)
    assert value is not None
    assert 0.9 < value <= 1.5


def test_macd_trend_up():
    """Monotonic uptrend → MACD positive and histogram non-negative."""
    out = macd([float(i) for i in range(1, 60)])
    for key in ("macd", "signal", "hist"):
        assert out[key] is not None
    assert out["macd"] > 0
    assert out["hist"] >= 0


def test_macd_insufficient_data():
    """Too few closes → all-None result dict."""
    assert macd([1.0, 2.0, 3.0]) == {"macd": None, "signal": None, "hist": None}


def test_macd_trend_down():
    """Monotonic downtrend → MACD negative, histogram non-positive."""
    out = macd([float(i) for i in range(60, 1, -1)])
    assert out["macd"] < 0
    assert out["hist"] <= 0


def test_adx_insufficient_data():
    """Too few bars → all-None result dict."""
    out = adx([1.0] * 10, [0.5] * 10, [0.7] * 10, period=14)
    assert out == {"adx": None, "+di": None, "-di": None}


def test_adx_strong_uptrend():
    """Steady uptrend: +DI dominates -DI and ADX is high."""
    base = [float(i) for i in range(1, 40)]
    out = adx([b + 1.0 for b in base], base, [b + 0.5 for b in base], period=14)
    assert out["adx"] is not None
    assert out["+di"] is not None and out["-di"] is not None
    assert out["+di"] > out["-di"]
    assert out["adx"] > 50.0


def test_adx_flat_market():
    """No directional movement at all → ADX near zero."""
    out = adx([10.0] * 40, [9.0] * 40, [9.5] * 40, period=14)
    assert out["adx"] is not None
    assert out["adx"] < 5.0
|
||||
@@ -0,0 +1,77 @@
|
||||
import json
|
||||
import logging
|
||||
|
||||
from option_mcp_common.logging import (
|
||||
SecretsFilter,
|
||||
configure_root_logging,
|
||||
get_json_logger,
|
||||
)
|
||||
|
||||
|
||||
def _make_record(msg: str) -> logging.LogRecord:
    """Build a bare INFO-level record carrying *msg*."""
    return logging.LogRecord(
        name="t", level=logging.INFO, pathname="", lineno=0,
        msg=msg, args=(), exc_info=None,
    )


def _last_err_line(captured) -> str:
    """Last output line; the json logger writes to stderr by default."""
    return (captured.err or captured.out).strip().splitlines()[-1]


def test_secrets_filter_masks_bearer():
    rec = _make_record("Got Bearer abcdef123456 from client")
    SecretsFilter().filter(rec)
    assert "abcdef" not in rec.msg
    assert "***" in rec.msg


def test_secrets_filter_masks_api_key_json():
    rec = _make_record('{"api_key": "sk-live-abc123xyz"}')
    SecretsFilter().filter(rec)
    assert "sk-live-abc123xyz" not in rec.msg


def test_json_logger_outputs_json(capsys):
    logger = get_json_logger("test")
    logger.info("hello", extra={"user_id": 42})
    payload = json.loads(_last_err_line(capsys.readouterr()))
    assert payload["message"] == "hello"
    assert payload["user_id"] == 42


def test_configure_root_json_format(monkeypatch, capsys):
    monkeypatch.setenv("LOG_FORMAT", "json")
    monkeypatch.setenv("LOG_LEVEL", "INFO")
    configure_root_logging()
    logging.info("root json test")
    payload = json.loads(capsys.readouterr().err.strip().splitlines()[-1])
    assert payload["message"] == "root json test"
    assert payload["levelname"] == "INFO"


def test_configure_root_text_format(monkeypatch, capsys):
    monkeypatch.setenv("LOG_FORMAT", "text")
    configure_root_logging()
    logging.info("root text test")
    line = capsys.readouterr().err.strip().splitlines()[-1]
    # The text format must NOT be JSON-parseable.
    parsed_as_json = True
    try:
        json.loads(line)
    except json.JSONDecodeError:
        parsed_as_json = False
    assert not parsed_as_json, "expected text format, got JSON"
    assert "root text test" in line


def test_configure_root_applies_secrets_filter(monkeypatch, capsys):
    monkeypatch.setenv("LOG_FORMAT", "json")
    configure_root_logging()
    logging.info("calling with Bearer sk-live-leak123456 token")
    payload = json.loads(capsys.readouterr().err.strip().splitlines()[-1])
    assert "sk-live-leak123456" not in payload["message"]
    assert "***" in payload["message"]
|
||||
@@ -0,0 +1,112 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from fastapi import Depends, FastAPI
|
||||
from fastapi.testclient import TestClient
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import _derive_input_schemas, mount_mcp_endpoint
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel
|
||||
|
||||
|
||||
class EchoBody(BaseModel):
    """Request body for the echo tool."""

    msg: str
    n: int = 1


def _make_app() -> tuple[FastAPI, TokenStore]:
    """App with two tool routes plus the observer token store used by tests."""
    store = TokenStore(tokens={"t": Principal("obs", {"observer"})})
    app = build_app(name="t", version="v", token_store=store)

    @app.post("/tools/echo")
    def echo(body: EchoBody, principal: Principal = Depends(require_principal)):
        return {"echo": body.msg, "n": body.n}

    @app.post("/tools/ping")
    def ping(principal: Principal = Depends(require_principal)):
        return {"pong": True}

    return app, store


def _tools_list(client: TestClient, token: str | None):
    """POST a tools/list JSON-RPC call, optionally with a bearer token."""
    headers = {"Authorization": f"Bearer {token}"} if token is not None else {}
    return client.post(
        "/mcp",
        headers=headers,
        json={"jsonrpc": "2.0", "id": 1, "method": "tools/list"},
    )


def test_derive_input_schemas_resolves_lazy_annotations():
    app, _ = _make_app()
    schemas = _derive_input_schemas(app, ["echo", "ping"])
    assert "echo" in schemas
    echo_schema = schemas["echo"]
    assert echo_schema["type"] == "object"
    for prop in ("msg", "n"):
        assert prop in echo_schema["properties"]
    assert "msg" in echo_schema["required"]
    # ping has no Pydantic body, so it is absent (caller applies the fallback).
    assert "ping" not in schemas


def test_mount_mcp_endpoint_exposes_derived_schemas():
    app, store = _make_app()
    mount_mcp_endpoint(
        app,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[
            {"name": "echo", "description": "Echo a message."},
            {"name": "ping", "description": "Ping."},
        ],
    )
    resp = _tools_list(TestClient(app), "t")
    assert resp.status_code == 200
    by_name = {tool["name"]: tool for tool in resp.json()["result"]["tools"]}
    assert set(by_name["echo"]["inputSchema"]["required"]) == {"msg"}
    # ping falls back to the generic permissive schema.
    assert by_name["ping"]["inputSchema"] == {
        "type": "object",
        "additionalProperties": True,
    }


def test_mount_mcp_endpoint_requires_auth():
    app, store = _make_app()
    mount_mcp_endpoint(
        app,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[{"name": "echo"}],
    )
    client = TestClient(app)
    assert _tools_list(client, None).status_code == 401
    assert _tools_list(client, "WRONG").status_code == 403


def test_explicit_input_schema_overrides_derived():
    app, store = _make_app()
    custom = {
        "type": "object",
        "properties": {"custom": {"type": "string"}},
        "required": ["custom"],
    }
    mount_mcp_endpoint(
        app,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[{"name": "echo", "input_schema": custom}],
    )
    resp = _tools_list(TestClient(app), "t")
    assert resp.json()["result"]["tools"][0]["inputSchema"] == custom
|
||||
@@ -0,0 +1,40 @@
|
||||
from option_mcp_common.models import EventPriority, EventType, L2Entry
|
||||
|
||||
|
||||
def test_l2_entry_minimal():
    """Only required fields supplied: optional ones default to None."""
    entry = L2Entry(
        timestamp="2026-04-17T10:30:00Z",
        setup="bull put spread ETH 1800/1750 14d",
        tesi="IV alta post-CPI, attesa mean-reversion",
        esito="aperto",
    )
    assert entry.scostamento_sigma is None
    assert entry.tesi_check is None


def test_l2_entry_full():
    """Fully populated entry round-trips through model_dump()."""
    entry = L2Entry(
        timestamp="2026-04-17T10:30:00Z",
        setup="bull put spread ETH 1800/1750 14d",
        tesi="IV alta post-CPI",
        tesi_check="ETH sopra 1820 per 24h con IV in calo",
        invalidation="rottura 1800 con volume > 2x media",
        esito="chiuso +12 USDC",
        scostamento="nessuno",
        scostamento_sigma=0.5,
        lezione="supporto ha tenuto, timing ok",
        sizing_note="size 80 USDC (ATR 1.3x media)",
    )
    assert entry.scostamento_sigma == 0.5
    assert entry.model_dump()["lezione"] == "supporto ha tenuto, timing ok"


def test_event_priority_enum():
    """Priorities expose string values and are orderable for ranking."""
    assert EventPriority.CRITICAL.value == "critical"
    assert EventPriority.LOW < EventPriority.CRITICAL


def test_event_type_enum():
    """Event types expose their wire-format string values."""
    assert EventType.ALERT.value == "alert"
    assert EventType.USER_INSTRUCTION.value == "user_instruction"
|
||||
@@ -0,0 +1,80 @@
|
||||
"""Tests for CER-016 risk guard."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
from fastapi import HTTPException
|
||||
from option_mcp_common.risk_guard import (
|
||||
enforce_aggregate,
|
||||
enforce_leverage,
|
||||
enforce_single_notional,
|
||||
max_aggregate,
|
||||
max_leverage,
|
||||
max_notional,
|
||||
)
|
||||
|
||||
|
||||
def _reset_env(monkeypatch, *names):
    """Drop the given risk-guard env overrides so defaults apply."""
    for name in names:
        monkeypatch.delenv(name, raising=False)


def test_defaults(monkeypatch):
    _reset_env(
        monkeypatch,
        "CERBERO_MAX_NOTIONAL", "CERBERO_MAX_AGGREGATE", "CERBERO_MAX_LEVERAGE",
    )
    assert max_notional() == 200.0
    assert max_aggregate() == 1000.0
    assert max_leverage() == 3


def test_env_override(monkeypatch):
    monkeypatch.setenv("CERBERO_MAX_NOTIONAL", "50")
    monkeypatch.setenv("CERBERO_MAX_AGGREGATE", "150")
    monkeypatch.setenv("CERBERO_MAX_LEVERAGE", "2")
    assert max_notional() == 50.0
    assert max_aggregate() == 150.0
    assert max_leverage() == 2


def test_leverage_default_when_none(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_LEVERAGE")
    assert enforce_leverage(None) == 3


def test_leverage_accepts_within_cap(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_LEVERAGE")
    assert enforce_leverage(2) == 2
    assert enforce_leverage(3) == 3


def test_leverage_rejects_above_cap(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_LEVERAGE")
    with pytest.raises(HTTPException) as excinfo:
        enforce_leverage(50)
    assert excinfo.value.status_code == 403
    assert excinfo.value.detail["error"] == "HARD_PROHIBITION"


def test_leverage_rejects_below_one(monkeypatch):
    with pytest.raises(HTTPException):
        enforce_leverage(0)


def test_single_notional_ok(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_NOTIONAL")
    enforce_single_notional(100.0, exchange="deribit", instrument="BTC-PERPETUAL")


def test_single_notional_rejects(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_NOTIONAL")
    with pytest.raises(HTTPException) as excinfo:
        enforce_single_notional(335.0, exchange="hyperliquid", instrument="ETH")
    assert excinfo.value.status_code == 403
    assert "335" in excinfo.value.detail["message"]


def test_aggregate_ok(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_AGGREGATE")
    enforce_aggregate(current_total=500.0, new_notional=200.0)


def test_aggregate_rejects(monkeypatch):
    _reset_env(monkeypatch, "CERBERO_MAX_AGGREGATE")
    with pytest.raises(HTTPException) as excinfo:
        enforce_aggregate(current_total=900.0, new_notional=200.0)
    assert excinfo.value.status_code == 403
|
||||
@@ -0,0 +1,90 @@
|
||||
from fastapi.testclient import TestClient
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
from option_mcp_common.server import build_app
|
||||
|
||||
|
||||
def _fresh_app():
    """Build a bare app with an empty token store."""
    return build_app(name="t", version="v", token_store=TokenStore(tokens={}))


def test_build_app_health():
    app = build_app(
        name="test-mcp", version="0.0.1", token_store=TokenStore(tokens={})
    )
    resp = TestClient(app).get("/health")
    assert resp.status_code == 200
    payload = resp.json()
    assert payload["status"] == "healthy"
    assert payload["name"] == "test-mcp"
    assert payload["version"] == "0.0.1"
    for key in ("uptime_seconds", "data_timestamp"):
        assert key in payload
    assert resp.headers.get("X-Duration-Ms") is not None


def test_build_app_adds_token_store():
    store = TokenStore(tokens={"t1": Principal("x", {"core"})})
    app = build_app(name="t", version="v", token_store=store)
    assert app.state.token_store is store


def test_timestamp_injector_dict_response():
    """CER-P5-001: dict response gets data_timestamp + X-Data-Timestamp header."""
    app = _fresh_app()

    @app.post("/tools/foo")
    def foo():
        return {"ok": True}

    resp = TestClient(app).post("/tools/foo")
    assert resp.status_code == 200
    payload = resp.json()
    assert payload["ok"] is True
    assert "data_timestamp" in payload
    assert resp.headers.get("X-Data-Timestamp") is not None


def test_timestamp_injector_list_of_dicts():
    """CER-P5-001: list of dicts — every item gets data_timestamp."""
    app = _fresh_app()

    @app.post("/tools/list_items")
    def list_items():
        return [{"x": 1}, {"x": 2}]

    resp = TestClient(app).post("/tools/list_items")
    payload = resp.json()
    assert isinstance(payload, list)
    assert len(payload) == 2
    assert all("data_timestamp" in item for item in payload)
    assert resp.headers.get("X-Data-Timestamp") is not None


def test_timestamp_injector_preserves_existing():
    """CER-P5-001: an existing data_timestamp is never overwritten."""
    app = _fresh_app()

    @app.post("/tools/already")
    def already():
        return {"data_timestamp": "2020-01-01T00:00:00Z", "x": 1}

    payload = TestClient(app).post("/tools/already").json()
    assert payload["data_timestamp"] == "2020-01-01T00:00:00Z"


def test_timestamp_injector_empty_list_gets_header_only():
    """CER-P5-001: empty list body is untouched, but the header is still set."""
    app = _fresh_app()

    @app.post("/tools/empty_list")
    def empty_list():
        return []

    resp = TestClient(app).post("/tools/empty_list")
    assert resp.json() == []
    assert resp.headers.get("X-Data-Timestamp") is not None
|
||||
@@ -0,0 +1,48 @@
|
||||
from pathlib import Path
|
||||
|
||||
from option_mcp_common.storage import Database, run_migrations
|
||||
|
||||
|
||||
def test_database_creates_wal(tmp_path: Path):
    """connect() must switch the database into WAL journal mode."""
    db = Database(tmp_path / "test.db")
    conn = db.connect()
    journal_mode = conn.execute("PRAGMA journal_mode").fetchone()[0]
    assert journal_mode.lower() == "wal"
    db.close()


def test_database_migrations_run_once(tmp_path: Path):
    """Re-running the same migration set is a no-op."""
    db = Database(tmp_path / "test.db")
    conn = db.connect()
    migrations = {
        1: "CREATE TABLE foo (id INTEGER PRIMARY KEY, name TEXT);",
        2: "ALTER TABLE foo ADD COLUMN value INTEGER DEFAULT 0;",
    }
    run_migrations(conn, migrations)
    run_migrations(conn, migrations)  # second run must be a no-op
    columns = [row[1] for row in conn.execute("PRAGMA table_info(foo)").fetchall()]
    assert "name" in columns
    assert "value" in columns
    top = conn.execute("SELECT MAX(version) FROM _schema_version").fetchone()[0]
    assert top == 2
    db.close()


def test_database_partial_migration(tmp_path: Path):
    """Versions added later are applied on top of the existing schema."""
    db = Database(tmp_path / "test.db")
    conn = db.connect()
    first = {1: "CREATE TABLE foo (id INTEGER);"}
    run_migrations(conn, first)
    run_migrations(conn, {**first, 2: "CREATE TABLE bar (id INTEGER);"})
    rows = conn.execute(
        "SELECT name FROM sqlite_master WHERE type='table'"
    ).fetchall()
    tables = {row[0] for row in rows}
    assert "foo" in tables
    assert "bar" in tables
    db.close()
|
||||
@@ -0,0 +1,29 @@
|
||||
[project]
|
||||
name = "mcp-alpaca"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
"alpaca-py>=0.32",
|
||||
"pytz>=2024.1",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_alpaca"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-alpaca = "mcp_alpaca.__main__:main"
|
||||
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_alpaca.client import AlpacaClient
|
||||
from mcp_alpaca.server import create_app
|
||||
|
||||
|
||||
configure_root_logging()
|
||||
|
||||
|
||||
def main():
    """Entry point: load Alpaca credentials, build the app and serve it.

    Required env: ALPACA_CREDENTIALS_FILE — path to a JSON file with
    ``api_key_id`` and ``secret_key``.
    Optional env: ALPACA_PAPER (default "true"; 0/false/no disables paper
    trading), CORE_TOKEN_FILE, OBSERVER_TOKEN_FILE, HOST, PORT.

    Exits with a clear message (SystemExit) on missing configuration instead
    of an opaque KeyError, matching the project's fail-fast convention.
    """
    creds_file = os.environ.get("ALPACA_CREDENTIALS_FILE")
    if not creds_file:
        raise SystemExit("ALPACA_CREDENTIALS_FILE environment variable is required")
    with open(creds_file) as f:
        creds = json.load(f)
    missing = [k for k in ("api_key_id", "secret_key") if k not in creds]
    if missing:
        raise SystemExit(
            f"credentials file {creds_file} missing keys: {', '.join(missing)}"
        )

    # Anything other than 0/false/no (case-insensitive) means paper trading.
    paper_env = os.environ.get("ALPACA_PAPER", "true").lower()
    paper = paper_env not in ("0", "false", "no")

    client = AlpacaClient(
        api_key=creds["api_key_id"],
        secret_key=creds["secret_key"],
        paper=paper,
    )

    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(client=client, token_store=token_store)
    uvicorn.run(
        app,
        log_config=None,  # keep the logging set up by configure_root_logging()
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9020")),
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,388 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import datetime as _dt
|
||||
from typing import Any
|
||||
|
||||
from alpaca.data.historical import (
|
||||
CryptoHistoricalDataClient,
|
||||
OptionHistoricalDataClient,
|
||||
StockHistoricalDataClient,
|
||||
)
|
||||
from alpaca.data.requests import (
|
||||
CryptoBarsRequest,
|
||||
CryptoLatestQuoteRequest,
|
||||
CryptoLatestTradeRequest,
|
||||
OptionBarsRequest,
|
||||
OptionChainRequest,
|
||||
OptionLatestQuoteRequest,
|
||||
StockBarsRequest,
|
||||
StockLatestQuoteRequest,
|
||||
StockLatestTradeRequest,
|
||||
StockSnapshotRequest,
|
||||
)
|
||||
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
|
||||
from alpaca.trading.client import TradingClient
|
||||
from alpaca.trading.enums import (
|
||||
AssetClass,
|
||||
OrderSide,
|
||||
OrderStatus,
|
||||
OrderType,
|
||||
QueryOrderStatus,
|
||||
TimeInForce,
|
||||
)
|
||||
from alpaca.trading.requests import (
|
||||
ClosePositionRequest,
|
||||
GetAssetsRequest,
|
||||
GetOrdersRequest,
|
||||
LimitOrderRequest,
|
||||
MarketOrderRequest,
|
||||
ReplaceOrderRequest,
|
||||
StopOrderRequest,
|
||||
)
|
||||
|
||||
|
||||
# Public interval labels → alpaca TimeFrame objects.  These are the only bar
# granularities the service accepts; extend this map to support more.
_TF_MAP = {
    "1min": TimeFrame(1, TimeFrameUnit.Minute),
    "5min": TimeFrame(5, TimeFrameUnit.Minute),
    "15min": TimeFrame(15, TimeFrameUnit.Minute),
    "30min": TimeFrame(30, TimeFrameUnit.Minute),
    "1h": TimeFrame(1, TimeFrameUnit.Hour),
    "1d": TimeFrame(1, TimeFrameUnit.Day),
    "1w": TimeFrame(1, TimeFrameUnit.Week),
}

# Asset-class labels accepted by the public API surface.
_ASSET_CLASSES = {"stocks", "crypto", "options"}
|
||||
|
||||
|
||||
def _tf(interval: str) -> TimeFrame:
    """Resolve an interval string ("1min", "1d", ...) to a TimeFrame.

    Raises ValueError for intervals not listed in _TF_MAP.
    """
    tf = _TF_MAP.get(interval)
    if tf is None:
        raise ValueError(f"unsupported timeframe: {interval}")
    return tf
|
||||
|
||||
|
||||
def _asset_class_enum(ac: str) -> AssetClass:
    """Translate a public asset-class string into the alpaca AssetClass enum.

    Accepts "stocks", "crypto" or "options" (case-insensitive); raises
    ValueError for anything else.
    """
    mapping = {
        "stocks": AssetClass.US_EQUITY,
        "crypto": AssetClass.CRYPTO,
        "options": AssetClass.US_OPTION,
    }
    normalized = ac.lower()
    if normalized not in mapping:
        raise ValueError(f"invalid asset_class: {normalized}")
    return mapping[normalized]
|
||||
|
||||
|
||||
def _serialize(obj: Any) -> Any:
    """Recursively convert pydantic/datetime objects → json-safe.

    Scalars and None pass through; dates/datetimes become ISO-8601 strings;
    Enum members serialize as their value; dicts/sequences/sets recurse
    element-wise (sets become lists); pydantic models go through
    model_dump(); other objects through vars(); anything left falls back
    to str().
    """
    from enum import Enum  # local import: the file's style allows these

    if obj is None or isinstance(obj, (str, int, float, bool)):
        return obj
    if isinstance(obj, Enum):
        # pydantic's model_dump() (python mode) leaves Enum members as-is;
        # without this branch they would fall into the __dict__ case below
        # and leak internals like {'_name_': ..., '_value_': ...}.
        return _serialize(obj.value)
    if isinstance(obj, (_dt.datetime, _dt.date)):
        return obj.isoformat()
    if isinstance(obj, dict):
        return {k: _serialize(v) for k, v in obj.items()}
    if isinstance(obj, (list, tuple, set, frozenset)):
        # Sets are not JSON-serializable; emit them as lists like other
        # sequences (previously they degraded to str(obj)).
        return [_serialize(v) for v in obj]
    if hasattr(obj, "model_dump"):
        return _serialize(obj.model_dump())
    if hasattr(obj, "__dict__"):
        return _serialize(vars(obj))
    return str(obj)
|
||||
|
||||
|
||||
class AlpacaClient:
    """Async wrapper around the synchronous alpaca-py SDK clients.

    Bundles a TradingClient with the stock/crypto/option historical-data
    clients, executes every SDK call on a worker thread via
    ``asyncio.to_thread`` so the event loop never blocks, and converts
    results to JSON-safe dicts with ``_serialize``.
    """

    def __init__(
        self,
        api_key: str,
        secret_key: str,
        paper: bool = True,
        trading: Any | None = None,
        stock_data: Any | None = None,
        crypto_data: Any | None = None,
        option_data: Any | None = None,
    ) -> None:
        """Create the client.

        The four optional client arguments exist for dependency injection
        (tests); when omitted, real alpaca-py clients are constructed.
        """
        self.api_key = api_key
        self.secret_key = secret_key
        self.paper = paper
        self._trading = trading or TradingClient(
            api_key=api_key, secret_key=secret_key, paper=paper
        )
        self._stock = stock_data or StockHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )
        self._crypto = crypto_data or CryptoHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )
        self._option = option_data or OptionHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )

    async def _run(self, fn, /, *args, **kwargs):
        """Run a blocking SDK call in a worker thread."""
        return await asyncio.to_thread(fn, *args, **kwargs)

    # ── Account / positions ──────────────────────────────────────

    async def get_account(self) -> dict:
        """Return the account summary as a JSON-safe dict."""
        acc = await self._run(self._trading.get_account)
        return _serialize(acc)

    async def get_positions(self) -> list[dict]:
        """Return all open positions, serialized."""
        pos = await self._run(self._trading.get_all_positions)
        return [_serialize(p) for p in pos]

    async def get_activities(self, limit: int = 50) -> list[dict]:
        """Return at most *limit* account activities.

        The SDK call is made without pagination arguments, so the result
        is trimmed client-side.
        """
        acts = await self._run(self._trading.get_account_activities)
        data = [_serialize(a) for a in acts]
        return data[:limit]

    # ── Assets ──────────────────────────────────────────────────

    async def get_assets(
        self, asset_class: str = "stocks", status: str = "active"
    ) -> list[dict]:
        """List assets for an asset class, capped at 500 entries."""
        req = GetAssetsRequest(
            asset_class=_asset_class_enum(asset_class),
            status=status,
        )
        assets = await self._run(self._trading.get_all_assets, req)
        # Cap the payload — the full asset universe can be very large.
        return [_serialize(a) for a in assets[:500]]

    # ── Market data ─────────────────────────────────────────────

    async def get_ticker(self, symbol: str, asset_class: str = "stocks") -> dict:
        """Return the latest trade/quote for *symbol*.

        The returned shape varies by asset class: stocks include quote
        sizes, crypto omits them, and options expose only the quote (no
        last-trade request is made).

        Raises ValueError for an unknown asset_class.
        """
        ac = asset_class.lower()
        if ac == "stocks":
            req = StockLatestTradeRequest(symbol_or_symbols=symbol)
            data = await self._run(self._stock.get_stock_latest_trade, req)
            trade = data.get(symbol)
            q_req = StockLatestQuoteRequest(symbol_or_symbols=symbol)
            qdata = await self._run(self._stock.get_stock_latest_quote, q_req)
            quote = qdata.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "stocks",
                "last_price": getattr(trade, "price", None),
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "bid_size": getattr(quote, "bid_size", None),
                "ask_size": getattr(quote, "ask_size", None),
                "timestamp": _serialize(getattr(trade, "timestamp", None)),
            }
        if ac == "crypto":
            req = CryptoLatestTradeRequest(symbol_or_symbols=symbol)
            data = await self._run(self._crypto.get_crypto_latest_trade, req)
            trade = data.get(symbol)
            q_req = CryptoLatestQuoteRequest(symbol_or_symbols=symbol)
            qdata = await self._run(self._crypto.get_crypto_latest_quote, q_req)
            quote = qdata.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "crypto",
                "last_price": getattr(trade, "price", None),
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "timestamp": _serialize(getattr(trade, "timestamp", None)),
            }
        if ac == "options":
            req = OptionLatestQuoteRequest(symbol_or_symbols=symbol)
            data = await self._run(self._option.get_option_latest_quote, req)
            quote = data.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "options",
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "timestamp": _serialize(getattr(quote, "timestamp", None)),
            }
        raise ValueError(f"invalid asset_class: {asset_class}")

    async def get_bars(
        self,
        symbol: str,
        asset_class: str = "stocks",
        interval: str = "1d",
        start: str | None = None,
        end: str | None = None,
        limit: int = 1000,
    ) -> dict:
        """Return OHLCV bars for *symbol*.

        *start*/*end* are ISO-8601 strings; the default window is the last
        30 days up to now (UTC). Raises ValueError for an unknown interval
        or asset_class.
        """
        tf = _tf(interval)
        # Default window: last 30 days, timezone-aware UTC.
        start_dt = _dt.datetime.fromisoformat(start) if start else (
            _dt.datetime.now(_dt.UTC) - _dt.timedelta(days=30)
        )
        end_dt = _dt.datetime.fromisoformat(end) if end else _dt.datetime.now(_dt.UTC)
        ac = asset_class.lower()
        if ac == "stocks":
            req = StockBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._stock.get_stock_bars, req)
        elif ac == "crypto":
            req = CryptoBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._crypto.get_crypto_bars, req)
        elif ac == "options":
            req = OptionBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._option.get_option_bars, req)
        else:
            raise ValueError(f"invalid asset_class: {asset_class}")
        # The SDK returns a bar-set keyed by symbol; missing symbols → [].
        bars_dict = getattr(data, "data", {}) or {}
        rows = bars_dict.get(symbol, []) or []
        bars = [
            {
                "timestamp": _serialize(getattr(b, "timestamp", None)),
                "open": getattr(b, "open", None),
                "high": getattr(b, "high", None),
                "low": getattr(b, "low", None),
                "close": getattr(b, "close", None),
                "volume": getattr(b, "volume", None),
            }
            for b in rows
        ]
        return {"symbol": symbol, "asset_class": ac, "interval": interval, "bars": bars}

    async def get_snapshot(self, symbol: str) -> dict:
        """Return the full stock snapshot for *symbol* (stocks only)."""
        req = StockSnapshotRequest(symbol_or_symbols=symbol)
        data = await self._run(self._stock.get_stock_snapshot, req)
        return _serialize(data.get(symbol))

    async def get_option_chain(
        self,
        underlying: str,
        expiry: str | None = None,
    ) -> dict:
        """Return the option chain for *underlying*.

        *expiry* (ISO date string), when given, filters by expiration date.
        """
        kwargs: dict[str, Any] = {"underlying_symbol": underlying}
        if expiry:
            kwargs["expiration_date"] = _dt.date.fromisoformat(expiry)
        req = OptionChainRequest(**kwargs)
        data = await self._run(self._option.get_option_chain, req)
        return {
            "underlying": underlying,
            "expiry": expiry,
            "contracts": _serialize(data),
        }

    # ── Orders ──────────────────────────────────────────────────

    async def get_open_orders(self, limit: int = 50) -> list[dict]:
        """Return up to *limit* open orders."""
        req = GetOrdersRequest(status=QueryOrderStatus.OPEN, limit=limit)
        orders = await self._run(self._trading.get_orders, filter=req)
        return [_serialize(o) for o in orders]

    async def place_order(
        self,
        symbol: str,
        side: str,
        qty: float | None = None,
        notional: float | None = None,
        order_type: str = "market",
        limit_price: float | None = None,
        stop_price: float | None = None,
        tif: str = "day",
        asset_class: str = "stocks",
    ) -> dict:
        """Submit an order.

        *order_type* is "market", "limit" (requires limit_price) or "stop"
        (requires stop_price); exactly one of *qty*/*notional* is expected
        by the broker. Raises ValueError on a bad order_type or a missing
        required price.
        """
        # Any side string other than "buy" is treated as a sell.
        side_enum = OrderSide.BUY if side.lower() == "buy" else OrderSide.SELL
        tif_enum = TimeInForce(tif.lower())
        ot = order_type.lower()
        common = {
            "symbol": symbol,
            "side": side_enum,
            "time_in_force": tif_enum,
        }
        if qty is not None:
            common["qty"] = qty
        if notional is not None:
            common["notional"] = notional
        if ot == "market":
            req = MarketOrderRequest(**common)
        elif ot == "limit":
            if limit_price is None:
                raise ValueError("limit_price required for limit order")
            req = LimitOrderRequest(**common, limit_price=limit_price)
        elif ot == "stop":
            if stop_price is None:
                raise ValueError("stop_price required for stop order")
            req = StopOrderRequest(**common, stop_price=stop_price)
        else:
            raise ValueError(f"unsupported order_type: {order_type}")
        order = await self._run(self._trading.submit_order, req)
        return _serialize(order)

    async def amend_order(
        self,
        order_id: str,
        qty: float | None = None,
        limit_price: float | None = None,
        stop_price: float | None = None,
        tif: str | None = None,
    ) -> dict:
        """Replace an existing order; only the non-None fields are changed."""
        kwargs: dict[str, Any] = {}
        if qty is not None:
            kwargs["qty"] = qty
        if limit_price is not None:
            kwargs["limit_price"] = limit_price
        if stop_price is not None:
            kwargs["stop_price"] = stop_price
        if tif is not None:
            kwargs["time_in_force"] = TimeInForce(tif.lower())
        req = ReplaceOrderRequest(**kwargs)
        order = await self._run(self._trading.replace_order_by_id, order_id, req)
        return _serialize(order)

    async def cancel_order(self, order_id: str) -> dict:
        """Cancel one order by id."""
        await self._run(self._trading.cancel_order_by_id, order_id)
        return {"order_id": order_id, "canceled": True}

    async def cancel_all_orders(self) -> list[dict]:
        """Cancel every open order; returns the per-order responses."""
        resp = await self._run(self._trading.cancel_orders)
        return [_serialize(r) for r in resp]

    # ── Position close ──────────────────────────────────────────

    async def close_position(
        self, symbol: str, qty: float | None = None, percentage: float | None = None
    ) -> dict:
        """Close a position, fully or partially (by qty or percentage)."""
        req = None
        if qty is not None or percentage is not None:
            kwargs: dict[str, Any] = {}
            # The SDK request expects these as strings.
            if qty is not None:
                kwargs["qty"] = str(qty)
            if percentage is not None:
                kwargs["percentage"] = str(percentage)
            req = ClosePositionRequest(**kwargs)
        order = await self._run(
            self._trading.close_position, symbol, close_options=req
        )
        return _serialize(order)

    async def close_all_positions(self, cancel_orders: bool = True) -> list[dict]:
        """Liquidate every position, optionally cancelling open orders first."""
        resp = await self._run(
            self._trading.close_all_positions, cancel_orders=cancel_orders
        )
        return [_serialize(r) for r in resp]

    # ── Clock / calendar ────────────────────────────────────────

    async def get_clock(self) -> dict:
        """Return the market clock (is_open, next open/close)."""
        clock = await self._run(self._trading.get_clock)
        return _serialize(clock)

    async def get_calendar(
        self, start: str | None = None, end: str | None = None
    ) -> list[dict]:
        """Return trading-session calendar rows, optionally bounded by ISO dates."""
        from alpaca.trading.requests import GetCalendarRequest

        kwargs: dict[str, Any] = {}
        if start:
            kwargs["start"] = _dt.date.fromisoformat(start)
        if end:
            kwargs["end"] = _dt.date.fromisoformat(end)
        req = GetCalendarRequest(**kwargs) if kwargs else None
        # Only pass `filters` when a request object was actually built.
        cal = await self._run(
            self._trading.get_calendar, filters=req
        ) if req else await self._run(self._trading.get_calendar)
        return [_serialize(c) for c in cal]
|
||||
@@ -0,0 +1,250 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import Depends, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel
|
||||
|
||||
from mcp_alpaca.client import AlpacaClient
|
||||
|
||||
|
||||
# --- Body models: reads ---
|
||||
|
||||
class AccountReq(BaseModel):
    """Empty request body for /tools/get_account."""

    pass
|
||||
|
||||
|
||||
class PositionsReq(BaseModel):
    """Empty request body for /tools/get_positions."""

    pass
|
||||
|
||||
|
||||
class ActivitiesReq(BaseModel):
    """Request body for /tools/get_activities."""

    limit: int = 50
|
||||
|
||||
|
||||
class AssetsReq(BaseModel):
    """Request body for /tools/get_assets."""

    asset_class: str = "stocks"  # stocks | crypto | options
    status: str = "active"
|
||||
|
||||
|
||||
class TickerReq(BaseModel):
    """Request body for /tools/get_ticker."""

    symbol: str
    asset_class: str = "stocks"  # stocks | crypto | options
|
||||
|
||||
|
||||
class BarsReq(BaseModel):
    """Request body for /tools/get_bars."""

    symbol: str
    asset_class: str = "stocks"  # stocks | crypto | options
    interval: str = "1d"  # one of the keys of client._TF_MAP, e.g. 1min/1h/1d
    start: str | None = None  # ISO-8601; default window is applied downstream
    end: str | None = None  # ISO-8601
    limit: int = 1000
|
||||
|
||||
|
||||
class SnapshotReq(BaseModel):
    """Request body for /tools/get_snapshot."""

    symbol: str
|
||||
|
||||
|
||||
class OptionChainReq(BaseModel):
    """Request body for /tools/get_option_chain."""

    underlying: str
    expiry: str | None = None  # ISO date; filters the chain when present
|
||||
|
||||
|
||||
class OpenOrdersReq(BaseModel):
    """Request body for /tools/get_open_orders."""

    limit: int = 50
|
||||
|
||||
|
||||
class ClockReq(BaseModel):
    """Empty request body for /tools/get_clock."""

    pass
|
||||
|
||||
|
||||
class CalendarReq(BaseModel):
    """Request body for /tools/get_calendar."""

    start: str | None = None  # ISO date
    end: str | None = None  # ISO date
|
||||
|
||||
|
||||
# --- Body models: writes ---
|
||||
|
||||
class PlaceOrderReq(BaseModel):
    """Request body for /tools/place_order (CORE capability required)."""

    symbol: str
    side: str  # "buy" or "sell"
    qty: float | None = None
    notional: float | None = None
    order_type: str = "market"  # market | limit | stop
    limit_price: float | None = None  # required when order_type == "limit"
    stop_price: float | None = None  # required when order_type == "stop"
    tif: str = "day"
    asset_class: str = "stocks"
|
||||
|
||||
|
||||
class AmendOrderReq(BaseModel):
    """Request body for /tools/amend_order; only non-None fields are changed."""

    order_id: str
    qty: float | None = None
    limit_price: float | None = None
    stop_price: float | None = None
    tif: str | None = None
|
||||
|
||||
|
||||
class CancelOrderReq(BaseModel):
    """Request body for /tools/cancel_order."""

    order_id: str
|
||||
|
||||
|
||||
class CancelAllReq(BaseModel):
    """Empty request body for /tools/cancel_all_orders."""

    pass
|
||||
|
||||
|
||||
class ClosePositionReq(BaseModel):
    """Request body for /tools/close_position (full close when qty/percentage omitted)."""

    symbol: str
    qty: float | None = None
    percentage: float | None = None
|
||||
|
||||
|
||||
class CloseAllPositionsReq(BaseModel):
    """Request body for /tools/close_all_positions."""

    cancel_orders: bool = True
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Authorize *principal* against the enabled capability flags.

    Raises HTTP 403 unless the principal holds at least one of the
    capabilities whose flag is True (no flags enabled ⇒ always 403).
    """
    enabled = {name for name, flag in (("core", core), ("observer", observer)) if flag}
    if principal.capabilities.isdisjoint(enabled):
        raise HTTPException(status_code=403, detail="forbidden")
|
||||
|
||||
|
||||
def create_app(*, client: AlpacaClient, token_store: TokenStore):
    """Build the FastAPI app exposing AlpacaClient as /tools/* endpoints.

    Every route requires a bearer token resolved by require_principal;
    reads accept both "core" and "observer" capabilities, writes are
    core-only (enforced by _check). An MCP endpoint advertising the same
    tools is mounted at the end.
    """
    app = build_app(name="mcp-alpaca", version="0.1.0", token_store=token_store)

    # ── Reads ──────────────────────────────────────────────

    @app.post("/tools/get_account", tags=["reads"])
    async def t_get_account(body: AccountReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_account()

    @app.post("/tools/get_positions", tags=["reads"])
    async def t_get_positions(body: PositionsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"positions": await client.get_positions()}

    @app.post("/tools/get_activities", tags=["reads"])
    async def t_get_activities(body: ActivitiesReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"activities": await client.get_activities(body.limit)}

    @app.post("/tools/get_assets", tags=["reads"])
    async def t_get_assets(body: AssetsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"assets": await client.get_assets(body.asset_class, body.status)}

    @app.post("/tools/get_ticker", tags=["reads"])
    async def t_get_ticker(body: TickerReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_ticker(body.symbol, body.asset_class)

    @app.post("/tools/get_bars", tags=["reads"])
    async def t_get_bars(body: BarsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_bars(
            body.symbol, body.asset_class, body.interval, body.start, body.end, body.limit,
        )

    @app.post("/tools/get_snapshot", tags=["reads"])
    async def t_get_snapshot(body: SnapshotReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_snapshot(body.symbol)

    @app.post("/tools/get_option_chain", tags=["reads"])
    async def t_get_option_chain(body: OptionChainReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_option_chain(body.underlying, body.expiry)

    @app.post("/tools/get_open_orders", tags=["reads"])
    async def t_get_open_orders(body: OpenOrdersReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"orders": await client.get_open_orders(body.limit)}

    @app.post("/tools/get_clock", tags=["reads"])
    async def t_get_clock(body: ClockReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_clock()

    @app.post("/tools/get_calendar", tags=["reads"])
    async def t_get_calendar(body: CalendarReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"calendar": await client.get_calendar(body.start, body.end)}

    # ── Writes ─────────────────────────────────────────────
    # Mutating endpoints: core capability only (no observer=True below).

    @app.post("/tools/place_order", tags=["writes"])
    async def t_place_order(body: PlaceOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return await client.place_order(
            body.symbol, body.side, body.qty, body.notional,
            body.order_type, body.limit_price, body.stop_price, body.tif, body.asset_class,
        )

    @app.post("/tools/amend_order", tags=["writes"])
    async def t_amend_order(body: AmendOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return await client.amend_order(
            body.order_id, body.qty, body.limit_price, body.stop_price, body.tif,
        )

    @app.post("/tools/cancel_order", tags=["writes"])
    async def t_cancel_order(body: CancelOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return await client.cancel_order(body.order_id)

    @app.post("/tools/cancel_all_orders", tags=["writes"])
    async def t_cancel_all(body: CancelAllReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return {"canceled": await client.cancel_all_orders()}

    @app.post("/tools/close_position", tags=["writes"])
    async def t_close(body: ClosePositionReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return await client.close_position(body.symbol, body.qty, body.percentage)

    @app.post("/tools/close_all_positions", tags=["writes"])
    async def t_close_all(body: CloseAllPositionsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        return {"closed": await client.close_all_positions(body.cancel_orders)}

    # ── MCP mount ──────────────────────────────────────────
    # The MCP bridge proxies tool calls back into this same app over HTTP,
    # so it needs the port this process will actually listen on.

    port = int(os.environ.get("PORT", "9020"))
    mount_mcp_endpoint(
        app,
        name="cerbero-alpaca",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_account", "description": "Alpaca account summary (equity, cash, buying_power)."},
            {"name": "get_positions", "description": "Posizioni aperte (stocks/crypto/options)."},
            {"name": "get_activities", "description": "Activity log (fills, dividends, transfers)."},
            {"name": "get_assets", "description": "Universo asset per asset_class."},
            {"name": "get_ticker", "description": "Last trade + quote per simbolo (stocks/crypto/options)."},
            {"name": "get_bars", "description": "OHLCV candles (stocks/crypto/options)."},
            {"name": "get_snapshot", "description": "Snapshot completo stock (last trade+quote+bar)."},
            {"name": "get_option_chain", "description": "Option chain per underlying."},
            {"name": "get_open_orders", "description": "Ordini pending."},
            {"name": "get_clock", "description": "Market clock (open/close, next_open)."},
            {"name": "get_calendar", "description": "Calendar sessioni trading."},
            {"name": "place_order", "description": "Invia ordine (CORE only)."},
            {"name": "amend_order", "description": "Replace ordine esistente."},
            {"name": "cancel_order", "description": "Cancella ordine."},
            {"name": "cancel_all_orders", "description": "Cancella tutti ordini aperti."},
            {"name": "close_position", "description": "Chiude posizione (tutta o parziale)."},
            {"name": "close_all_positions", "description": "Liquida tutto il portafoglio."},
        ],
    )

    return app
|
||||
@@ -0,0 +1,40 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from mcp_alpaca.client import AlpacaClient
|
||||
|
||||
|
||||
@pytest.fixture
def mock_trading():
    """Stand-in for alpaca's TradingClient."""
    return MagicMock(name="alpaca_TradingClient")
|
||||
|
||||
|
||||
@pytest.fixture
def mock_stock():
    """Stand-in for alpaca's StockHistoricalDataClient."""
    return MagicMock(name="alpaca_StockHistoricalDataClient")
|
||||
|
||||
|
||||
@pytest.fixture
def mock_crypto():
    """Stand-in for alpaca's CryptoHistoricalDataClient."""
    return MagicMock(name="alpaca_CryptoHistoricalDataClient")
|
||||
|
||||
|
||||
@pytest.fixture
def mock_option():
    """Stand-in for alpaca's OptionHistoricalDataClient."""
    return MagicMock(name="alpaca_OptionHistoricalDataClient")
|
||||
|
||||
|
||||
@pytest.fixture
def client(mock_trading, mock_stock, mock_crypto, mock_option):
    """AlpacaClient wired entirely to mocks — no network calls possible."""
    return AlpacaClient(
        api_key="test_key",
        secret_key="test_secret",
        paper=True,
        trading=mock_trading,
        stock_data=mock_stock,
        crypto_data=mock_crypto,
        option_data=mock_option,
    )
|
||||
@@ -0,0 +1,80 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_init_paper_mode(client, mock_trading):
    """Injected clients are stored verbatim and paper mode is preserved."""
    assert client.paper is True
    assert client._trading is mock_trading
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_account_calls_trading(client, mock_trading):
    """get_account delegates to the trading client and serializes the model."""
    mock_trading.get_account.return_value = MagicMock(
        model_dump=lambda: {"equity": 100000, "cash": 50000}
    )
    result = await client.get_account()
    mock_trading.get_account.assert_called_once()
    assert result["equity"] == 100000
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_positions_returns_list(client, mock_trading):
    """get_positions serializes each position returned by the SDK."""
    pos_mock = MagicMock(model_dump=lambda: {"symbol": "AAPL", "qty": 10})
    mock_trading.get_all_positions.return_value = [pos_mock]
    result = await client.get_positions()
    assert len(result) == 1
    assert result[0]["symbol"] == "AAPL"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_place_market_order_stocks(client, mock_trading):
    """A market order is submitted through the trading client and serialized."""
    order_mock = MagicMock(model_dump=lambda: {"id": "o123", "symbol": "AAPL"})
    mock_trading.submit_order.return_value = order_mock
    result = await client.place_order(
        symbol="AAPL", side="buy", qty=1, order_type="market", asset_class="stocks",
    )
    assert result["id"] == "o123"
    assert mock_trading.submit_order.called
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_place_limit_order_requires_price(client):
    """Limit orders without a limit_price are rejected before submission."""
    with pytest.raises(ValueError, match="limit_price"):
        await client.place_order(
            symbol="AAPL", side="buy", qty=1, order_type="limit",
        )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cancel_order(client, mock_trading):
    """cancel_order forwards the id and reports a synthetic confirmation."""
    mock_trading.cancel_order_by_id.return_value = None
    result = await client.cancel_order("o1")
    mock_trading.cancel_order_by_id.assert_called_once_with("o1")
    assert result == {"order_id": "o1", "canceled": True}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_close_position_no_options(client, mock_trading):
    """Closing without qty/percentage still calls the SDK (full close path)."""
    order_mock = MagicMock(model_dump=lambda: {"id": "close-1"})
    mock_trading.close_position.return_value = order_mock
    result = await client.close_position("AAPL")
    assert mock_trading.close_position.called
    assert result["id"] == "close-1"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_clock(client, mock_trading):
    """get_clock serializes the SDK clock model."""
    clock_mock = MagicMock(model_dump=lambda: {"is_open": True, "next_close": "2026-04-21T20:00:00Z"})
    mock_trading.get_clock.return_value = clock_mock
    result = await client.get_clock()
    assert result["is_open"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_invalid_asset_class(client):
    """Unknown asset classes are rejected with a ValueError."""
    with pytest.raises(ValueError, match="invalid asset_class"):
        await client.get_ticker("AAPL", asset_class="forex")
|
||||
@@ -0,0 +1,111 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
from mcp_alpaca.server import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def token_store():
    """Token store with one core-capability token and one observer token."""
    return TokenStore(
        tokens={
            "core-tok": Principal("core", {"core"}),
            "obs-tok": Principal("observer", {"observer"}),
        }
    )
|
||||
|
||||
|
||||
@pytest.fixture
def mock_client():
    """AlpacaClient stand-in with every coroutine method stubbed via AsyncMock."""
    c = MagicMock()
    c.get_account = AsyncMock(return_value={"equity": 100000})
    c.get_positions = AsyncMock(return_value=[])
    c.get_activities = AsyncMock(return_value=[])
    c.get_assets = AsyncMock(return_value=[])
    c.get_ticker = AsyncMock(return_value={"symbol": "AAPL"})
    c.get_bars = AsyncMock(return_value={"bars": []})
    c.get_snapshot = AsyncMock(return_value={})
    c.get_option_chain = AsyncMock(return_value={"contracts": []})
    c.get_open_orders = AsyncMock(return_value=[])
    c.get_clock = AsyncMock(return_value={"is_open": True})
    c.get_calendar = AsyncMock(return_value=[])
    c.place_order = AsyncMock(return_value={"id": "o1"})
    c.amend_order = AsyncMock(return_value={"id": "o1"})
    c.cancel_order = AsyncMock(return_value={"canceled": True})
    c.cancel_all_orders = AsyncMock(return_value=[])
    c.close_position = AsyncMock(return_value={"id": "close1"})
    c.close_all_positions = AsyncMock(return_value=[])
    return c
|
||||
|
||||
|
||||
@pytest.fixture
def http(mock_client, token_store):
    """TestClient over an app built with the mocked client and token store."""
    app = create_app(client=mock_client, token_store=token_store)
    return TestClient(app)
|
||||
|
||||
|
||||
# Auth headers matching the tokens defined in the token_store fixture.
CORE = {"Authorization": "Bearer core-tok"}
OBS = {"Authorization": "Bearer obs-tok"}

# (path, minimal valid JSON body) pairs for every read-only tool route.
READ_ENDPOINTS = [
    ("/tools/get_account", {}),
    ("/tools/get_positions", {}),
    ("/tools/get_activities", {}),
    ("/tools/get_assets", {}),
    ("/tools/get_ticker", {"symbol": "AAPL"}),
    ("/tools/get_bars", {"symbol": "AAPL"}),
    ("/tools/get_snapshot", {"symbol": "AAPL"}),
    ("/tools/get_option_chain", {"underlying": "AAPL"}),
    ("/tools/get_open_orders", {}),
    ("/tools/get_clock", {}),
    ("/tools/get_calendar", {}),
]

# (path, minimal valid JSON body) pairs for every mutating (core-only) route.
WRITE_ENDPOINTS = [
    ("/tools/place_order", {"symbol": "AAPL", "side": "buy", "qty": 1}),
    ("/tools/amend_order", {"order_id": "o1", "qty": 2}),
    ("/tools/cancel_order", {"order_id": "o1"}),
    ("/tools/cancel_all_orders", {}),
    ("/tools/close_position", {"symbol": "AAPL"}),
    ("/tools/close_all_positions", {}),
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_core_ok(http, path, payload):
    """Core tokens can call every read endpoint."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_observer_ok(http, path, payload):
    """Observer tokens can call every read endpoint."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 200, (path, r.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_no_auth_401(http, path, payload):
    """Reads without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_core_ok(http, path, payload):
    """Core tokens can call every write endpoint."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_observer_403(http, path, payload):
    """Observer tokens are forbidden (403) on every write endpoint."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 403, (path, r.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_no_auth_401(http, path, payload):
    """Writes without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
|
||||
@@ -0,0 +1,28 @@
|
||||
[project]
|
||||
name = "mcp-bybit"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
"pybit>=5.8",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_bybit"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-bybit = "mcp_bybit.__main__:main"
|
||||
@@ -0,0 +1,45 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_bybit.client import BybitClient
|
||||
from mcp_bybit.server import create_app
|
||||
|
||||
|
||||
# Configure process-wide logging once at import time (uvicorn is started
# with log_config=None below, so this configuration stays in effect).
configure_root_logging()
|
||||
|
||||
|
||||
def main():
    """Entry point: build the Bybit client from env config and serve the app.

    Required env:
      BYBIT_CREDENTIALS_FILE -- path to a JSON file with "api_key"/"api_secret".
    Optional env:
      BYBIT_TESTNET (default "true"), CORE_TOKEN_FILE, OBSERVER_TOKEN_FILE,
      HOST (default "0.0.0.0"), PORT (default "9019").
    """
    # Raises KeyError if the credentials env var is unset (fail fast at startup).
    creds_file = os.environ["BYBIT_CREDENTIALS_FILE"]
    with open(creds_file) as f:
        creds = json.load(f)

    # Anything except an explicit "0"/"false"/"no" keeps testnet ON, so a
    # misconfigured deployment defaults to the safe (non-mainnet) side.
    testnet_env = os.environ.get("BYBIT_TESTNET", "true").lower()
    testnet = testnet_env not in ("0", "false", "no")

    client = BybitClient(
        api_key=creds["api_key"],
        api_secret=creds["api_secret"],
        testnet=testnet,
    )

    # Token files are optional; the store loader receives None when unset.
    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(client=client, token_store=token_store)
    # log_config=None keeps the logging configured by configure_root_logging()
    # instead of uvicorn's default dictConfig.
    uvicorn.run(
        app,
        log_config=None,
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9019")),
    )


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,558 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from typing import Any
|
||||
|
||||
from option_mcp_common import indicators as ind
|
||||
from pybit.unified_trading import HTTP
|
||||
|
||||
|
||||
def _f(v: Any) -> float | None:
|
||||
try:
|
||||
return float(v)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
def _i(v: Any) -> int | None:
|
||||
try:
|
||||
return int(v)
|
||||
except (TypeError, ValueError):
|
||||
return None
|
||||
|
||||
|
||||
class BybitClient:
|
||||
def __init__(
    self,
    api_key: str,
    api_secret: str,
    testnet: bool = True,
    http: Any | None = None,
) -> None:
    """Wrap pybit's unified-trading HTTP session.

    *http* lets tests inject a stub transport; otherwise a real pybit
    ``HTTP`` session is created with the given credentials.
    Defaults to testnet so a bare construction never hits mainnet.
    """
    self.api_key = api_key
    self.api_secret = api_secret
    self.testnet = testnet
    # `http or ...`: any injected object wins over the real SDK session.
    self._http = http or HTTP(
        api_key=api_key,
        api_secret=api_secret,
        testnet=testnet,
    )
|
||||
|
||||
async def _run(self, fn, /, **kwargs):
|
||||
return await asyncio.to_thread(fn, **kwargs)
|
||||
|
||||
@staticmethod
def _parse_ticker(row: dict) -> dict:
    """Map one raw Bybit ticker row to a snake_case dict of floats."""
    numeric_fields = (
        ("last_price", "lastPrice"),
        ("mark_price", "markPrice"),
        ("bid", "bid1Price"),
        ("ask", "ask1Price"),
        ("volume_24h", "volume24h"),
        ("turnover_24h", "turnover24h"),
        ("funding_rate", "fundingRate"),
        ("open_interest", "openInterest"),
    )
    parsed: dict = {"symbol": row.get("symbol")}
    for out_key, src_key in numeric_fields:
        parsed[out_key] = _f(row.get(src_key))
    return parsed
|
||||
|
||||
async def get_ticker(self, symbol: str, category: str = "linear") -> dict:
    """Fetch one ticker; returns {"error": "not_found"} when no row comes back."""
    resp = await self._run(
        self._http.get_tickers, category=category, symbol=symbol
    )
    # result / result.list may be absent or None on error payloads.
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"symbol": symbol, "error": "not_found"}
    return self._parse_ticker(rows[0])
|
||||
|
||||
async def get_ticker_batch(
    self, symbols: list[str], category: str = "linear"
) -> dict[str, dict]:
    """Fetch tickers for several symbols concurrently.

    Returns {symbol: ticker-dict} in input order. The previous version
    awaited each symbol sequentially; asyncio.gather overlaps the
    thread-offloaded requests, so latency is ~one round trip instead of N.
    """
    if not symbols:
        return {}
    results = await asyncio.gather(
        *(self.get_ticker(sym, category=category) for sym in symbols)
    )
    # zip preserves the pairing; duplicate symbols keep the last result,
    # matching the old overwrite-on-assignment behavior.
    return dict(zip(symbols, results))
|
||||
|
||||
async def get_orderbook(
    self, symbol: str, category: str = "linear", limit: int = 50
) -> dict:
    """Fetch order-book depth as [price, qty] float pairs.

    "s"/"b"/"a"/"ts" are Bybit's short response keys for symbol, bids,
    asks, timestamp.
    """
    resp = await self._run(
        self._http.get_orderbook, category=category, symbol=symbol, limit=limit
    )
    r = resp.get("result") or {}
    return {
        "symbol": r.get("s"),
        # Levels arrive as string pairs; convert eagerly to floats.
        "bids": [[float(p), float(q)] for p, q in (r.get("b") or [])],
        "asks": [[float(p), float(q)] for p, q in (r.get("a") or [])],
        "timestamp": r.get("ts"),
    }
|
||||
|
||||
async def get_historical(
    self,
    symbol: str,
    category: str = "linear",
    interval: str = "60",
    start: int | None = None,
    end: int | None = None,
    limit: int = 1000,
) -> dict:
    """Fetch OHLCV klines and normalize them to ascending-time dicts.

    *interval* uses Bybit kline codes (e.g. "60" for 1h); *start*/*end*
    (ms timestamps) are only sent when provided.
    """
    kwargs = dict(
        category=category,
        symbol=symbol,
        interval=interval,
        limit=limit,
    )
    if start is not None:
        kwargs["start"] = start
    if end is not None:
        kwargs["end"] = end
    resp = await self._run(self._http.get_kline, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    # Sort by the timestamp column so candles are oldest-first regardless
    # of the order the API returned them in.
    rows_sorted = sorted(rows, key=lambda r: int(r[0]))
    # Row layout [ts, open, high, low, close, volume] is assumed from the
    # indexing here — confirm against the Bybit kline schema.
    candles = [
        {
            "timestamp": int(r[0]),
            "open": float(r[1]),
            "high": float(r[2]),
            "low": float(r[3]),
            "close": float(r[4]),
            "volume": float(r[5]),
        }
        for r in rows_sorted
    ]
    return {"symbol": symbol, "candles": candles}
|
||||
|
||||
async def get_indicators(
    self,
    symbol: str,
    category: str = "linear",
    indicators: list[str] | None = None,
    interval: str = "60",
    start: int | None = None,
    end: int | None = None,
) -> dict:
    """Compute the requested technical indicators over fetched candles.

    Unknown indicator names yield a None entry rather than an error.
    Defaults to rsi/atr/macd/adx when no list (or an empty one) is given.
    """
    requested = indicators or ["rsi", "atr", "macd", "adx"]
    historical = await self.get_historical(
        symbol, category=category, interval=interval, start=start, end=end
    )
    candles = historical.get("candles", [])
    closes = [c["close"] for c in candles]
    highs = [c["high"] for c in candles]
    lows = [c["low"] for c in candles]

    # Dispatch table: each entry lazily computes one indicator series.
    calculators = {
        "sma": lambda: ind.sma(closes, 20),
        "rsi": lambda: ind.rsi(closes),
        "atr": lambda: ind.atr(highs, lows, closes),
        "macd": lambda: ind.macd(closes),
        "adx": lambda: ind.adx(highs, lows, closes),
    }
    out: dict[str, Any] = {"symbol": symbol, "category": category}
    for name in requested:
        key = name.lower()
        calc = calculators.get(key)
        out[key] = calc() if calc is not None else None
    return out
|
||||
|
||||
async def get_funding_rate(self, symbol: str, category: str = "linear") -> dict:
    """Current funding rate and next funding time, read from the ticker row."""
    resp = await self._run(
        self._http.get_tickers, category=category, symbol=symbol
    )
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"symbol": symbol, "error": "not_found"}
    row = rows[0]
    return {
        "symbol": row.get("symbol"),
        # Defensive parses: None when the API field is missing/malformed.
        "funding_rate": _f(row.get("fundingRate")),
        "next_funding_time": _i(row.get("nextFundingTime")),
    }
|
||||
|
||||
async def get_funding_history(
    self, symbol: str, category: str = "linear", limit: int = 100
) -> dict:
    """Historical funding rates; missing fields default to 0."""
    resp = await self._run(
        self._http.get_funding_rate_history,
        category=category, symbol=symbol, limit=limit,
    )
    rows = (resp.get("result") or {}).get("list") or []
    hist = [
        {
            "timestamp": int(r.get("fundingRateTimestamp", 0)),
            "rate": float(r.get("fundingRate", 0)),
        }
        for r in rows
    ]
    return {"symbol": symbol, "history": hist}
|
||||
|
||||
async def get_open_interest(
    self,
    symbol: str,
    category: str = "linear",
    interval: str = "5min",
    limit: int = 288,  # 288 x 5min = ~24h of points
) -> dict:
    """Open-interest history plus the most recent value."""
    resp = await self._run(
        self._http.get_open_interest,
        category=category, symbol=symbol, intervalTime=interval, limit=limit,
    )
    rows = (resp.get("result") or {}).get("list") or []
    points = [
        {
            "timestamp": int(r.get("timestamp", 0)),
            "oi": float(r.get("openInterest", 0)),
        }
        for r in rows
    ]
    # NOTE(review): taking points[0] as "current" assumes the API returns
    # newest-first — confirm against the Bybit open-interest endpoint.
    current_oi = points[0]["oi"] if points else None
    return {
        "symbol": symbol,
        "category": category,
        "interval": interval,
        "current_oi": current_oi,
        "points": points,
    }
|
||||
|
||||
async def get_instruments(self, category: str = "linear", symbol: str | None = None) -> dict:
    """Contract specs (tick size, qty step, min qty) per instrument."""
    kwargs: dict[str, Any] = {"category": category}
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.get_instruments_info, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    instruments = []
    for r in rows:
        # Filters may be absent on some instrument types.
        pf = r.get("priceFilter") or {}
        lf = r.get("lotSizeFilter") or {}
        instruments.append({
            "symbol": r.get("symbol"),
            "status": r.get("status"),
            "base_coin": r.get("baseCoin"),
            "quote_coin": r.get("quoteCoin"),
            "tick_size": _f(pf.get("tickSize")),
            "qty_step": _f(lf.get("qtyStep")),
            "min_qty": _f(lf.get("minOrderQty")),
        })
    return {"category": category, "instruments": instruments}
|
||||
|
||||
async def get_option_chain(self, base_coin: str, expiry: str | None = None) -> dict:
    """List option instruments for *base_coin*, optionally filtered by expiry.

    *expiry* is matched as a substring of the option symbol (e.g. "27DEC24").
    Fix: the old bare int() calls raised ValueError on a malformed/empty
    timestamp string from the API; timestamps are now parsed with the
    module's defensive _i helper, consistent with the other methods.
    """
    kwargs: dict[str, Any] = {"category": "option", "baseCoin": base_coin.upper()}
    resp = await self._run(self._http.get_instruments_info, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    options = []
    for r in rows:
        # Filter first; only parse rows we keep.
        if expiry and expiry not in r.get("symbol", ""):
            continue
        options.append({
            "symbol": r.get("symbol"),
            "base_coin": r.get("baseCoin"),
            "settle_coin": r.get("settleCoin"),
            "type": r.get("optionsType"),
            # `or 0` preserves the old default of 0 for a missing launchTime.
            "launch_time": _i(r.get("launchTime")) or 0,
            "delivery_time": _i(r.get("deliveryTime")),
        })
    return {"base_coin": base_coin.upper(), "options": options}
|
||||
|
||||
async def get_positions(
    self, category: str = "linear", settle_coin: str = "USDT"
) -> list[dict]:
    """Open positions; settleCoin is only sent for linear/inverse categories."""
    kwargs: dict[str, Any] = {"category": category}
    if category in ("linear", "inverse"):
        kwargs["settleCoin"] = settle_coin
    resp = await self._run(self._http.get_positions, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    out = []
    for r in rows:
        out.append({
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "size": _f(r.get("size")),
            "entry_price": _f(r.get("avgPrice")),
            "unrealized_pnl": _f(r.get("unrealisedPnl")),
            "leverage": _f(r.get("leverage")),
            "liquidation_price": _f(r.get("liqPrice")),
            "position_value": _f(r.get("positionValue")),
        })
    return out
|
||||
|
||||
async def get_account_summary(self, account_type: str = "UNIFIED") -> dict:
    """Wallet totals plus per-coin balances for the first account row."""
    resp = await self._run(
        self._http.get_wallet_balance, accountType=account_type
    )
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"error": "no_account"}
    # Only the first account entry is summarized.
    a = rows[0]
    coins = []
    for c in a.get("coin") or []:
        coins.append({
            "coin": c.get("coin"),
            "wallet_balance": _f(c.get("walletBalance")),
            "equity": _f(c.get("equity")),
        })
    return {
        "account_type": a.get("accountType"),
        "equity": _f(a.get("totalEquity")),
        "wallet_balance": _f(a.get("totalWalletBalance")),
        "margin_balance": _f(a.get("totalMarginBalance")),
        "available_balance": _f(a.get("totalAvailableBalance")),
        "unrealized_pnl": _f(a.get("totalPerpUPL")),
        "coins": coins,
    }
|
||||
|
||||
async def get_trade_history(
    self, category: str = "linear", limit: int = 50
) -> list[dict]:
    """Recent fills (executions) normalized to snake_case dicts."""
    resp = await self._run(
        self._http.get_executions, category=category, limit=limit
    )
    rows = (resp.get("result") or {}).get("list") or []
    return [
        {
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "size": _f(r.get("execQty")),
            "price": _f(r.get("execPrice")),
            "fee": _f(r.get("execFee")),
            "timestamp": _i(r.get("execTime")),
            "order_id": r.get("orderId"),
        }
        for r in rows
    ]
|
||||
|
||||
async def get_open_orders(
    self,
    category: str = "linear",
    symbol: str | None = None,
    settle_coin: str = "USDT",
) -> list[dict]:
    """Pending orders, optionally scoped to one symbol.

    For linear/inverse without a symbol the query must be scoped by
    settleCoin instead; with a symbol, the symbol alone is sent.
    """
    kwargs: dict[str, Any] = {"category": category}
    if category in ("linear", "inverse") and not symbol:
        kwargs["settleCoin"] = settle_coin
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.get_open_orders, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    return [
        {
            "order_id": r.get("orderId"),
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "qty": _f(r.get("qty")),
            "price": _f(r.get("price")),
            "type": r.get("orderType"),
            "status": r.get("orderStatus"),
            "reduce_only": bool(r.get("reduceOnly")),
        }
        for r in rows
    ]
|
||||
|
||||
async def get_basis_spot_perp(self, asset: str) -> dict:
    """Spot vs linear-perp basis for *asset* (e.g. "BTC" -> BTCUSDT).

    Improvement: the two ticker fetches are independent, so they are now
    issued concurrently with asyncio.gather instead of sequentially.
    """
    asset = asset.upper()
    symbol = f"{asset}USDT"
    spot, perp = await asyncio.gather(
        self.get_ticker(symbol, category="spot"),
        self.get_ticker(symbol, category="linear"),
    )
    sp = spot.get("last_price")
    pp = perp.get("last_price")
    basis_abs = basis_pct = None
    # Truthiness check also rejects a zero spot price (division guard)
    # and the None values produced by "not_found" tickers.
    if sp and pp:
        basis_abs = pp - sp
        basis_pct = 100.0 * basis_abs / sp
    return {
        "asset": asset,
        "symbol": symbol,
        "spot_price": sp,
        "perp_price": pp,
        "basis_abs": basis_abs,
        "basis_pct": basis_pct,
        "funding_rate": perp.get("funding_rate"),
    }
|
||||
|
||||
def _envelope(self, resp: dict, payload: dict) -> dict:
|
||||
code = resp.get("retCode", 0)
|
||||
if code != 0:
|
||||
return {"error": resp.get("retMsg", "bybit_error"), "code": code}
|
||||
return payload
|
||||
|
||||
async def place_order(
    self,
    category: str,
    symbol: str,
    side: str,
    qty: float,
    order_type: str = "Limit",
    price: float | None = None,
    tif: str = "GTC",
    reduce_only: bool = False,
    position_idx: int | None = None,
) -> dict:
    """Submit an order; returns {order_id, order_link_id, status} or an
    error envelope when Bybit rejects it (retCode != 0).

    qty/price are sent as strings, as the Bybit API expects.
    """
    kwargs: dict[str, Any] = {
        "category": category,
        "symbol": symbol,
        "side": side,
        "qty": str(qty),
        "orderType": order_type,
        "timeInForce": tif,
        "reduceOnly": reduce_only,
    }
    if price is not None:
        kwargs["price"] = str(price)
    if position_idx is not None:
        kwargs["positionIdx"] = position_idx
    if category == "option":
        # Option orders get a client-side id so they can be correlated later.
        import uuid
        kwargs["orderLinkId"] = f"cerbero-{uuid.uuid4().hex[:16]}"
    resp = await self._run(self._http.place_order, **kwargs)
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "order_id": r.get("orderId"),
        "order_link_id": r.get("orderLinkId"),
        "status": "submitted",
    })
|
||||
|
||||
async def amend_order(
    self,
    category: str,
    symbol: str,
    order_id: str,
    new_qty: float | None = None,
    new_price: float | None = None,
) -> dict:
    """Amend qty and/or price of an existing order; unset fields are untouched."""
    kwargs: dict[str, Any] = {
        "category": category,
        "symbol": symbol,
        "orderId": order_id,
    }
    if new_qty is not None:
        kwargs["qty"] = str(new_qty)
    if new_price is not None:
        kwargs["price"] = str(new_price)
    resp = await self._run(self._http.amend_order, **kwargs)
    r = resp.get("result") or {}
    return self._envelope(resp, {
        # Fall back to the caller's id when the response omits orderId.
        "order_id": r.get("orderId", order_id),
        "status": "amended",
    })
|
||||
|
||||
async def cancel_order(
    self, category: str, symbol: str, order_id: str
) -> dict:
    """Cancel one order by id; error envelope when Bybit rejects it."""
    resp = await self._run(
        self._http.cancel_order,
        category=category, symbol=symbol, orderId=order_id,
    )
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "order_id": r.get("orderId", order_id),
        "status": "cancelled",
    })
|
||||
|
||||
async def cancel_all_orders(
    self, category: str, symbol: str | None = None
) -> dict:
    """Cancel all open orders in *category*, optionally scoped to one symbol."""
    kwargs: dict[str, Any] = {"category": category}
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.cancel_all_orders, **kwargs)
    r = resp.get("result") or {}
    # The response lists the orders that were actually cancelled.
    ids = [x.get("orderId") for x in (r.get("list") or [])]
    return self._envelope(resp, {
        "cancelled_ids": ids,
        "count": len(ids),
    })
|
||||
|
||||
async def set_stop_loss(
    self, category: str, symbol: str, stop_loss: float,
    position_idx: int = 0,
) -> dict:
    """Attach/replace a stop-loss on the position (trading-stop endpoint)."""
    resp = await self._run(
        self._http.set_trading_stop,
        category=category, symbol=symbol,
        stopLoss=str(stop_loss), positionIdx=position_idx,
    )
    return self._envelope(resp, {
        "symbol": symbol, "stop_loss": stop_loss,
        "status": "stop_loss_set",
    })
|
||||
|
||||
async def set_take_profit(
    self, category: str, symbol: str, take_profit: float,
    position_idx: int = 0,
) -> dict:
    """Attach/replace a take-profit on the position (trading-stop endpoint)."""
    resp = await self._run(
        self._http.set_trading_stop,
        category=category, symbol=symbol,
        takeProfit=str(take_profit), positionIdx=position_idx,
    )
    return self._envelope(resp, {
        "symbol": symbol, "take_profit": take_profit,
        "status": "take_profit_set",
    })
|
||||
|
||||
async def close_position(self, category: str, symbol: str) -> dict:
    """Close the open position on *symbol* with a reduce-only IOC market order."""
    positions = await self.get_positions(category=category)
    # (size or 0) guards against a size that was parsed to None.
    target = next((p for p in positions if p["symbol"] == symbol and (p["size"] or 0) > 0), None)
    if not target:
        return {"error": "no_open_position", "symbol": symbol}
    # Opposite side flattens the position.
    close_side = "Sell" if target["side"] == "Buy" else "Buy"
    return await self.place_order(
        category=category,
        symbol=symbol,
        side=close_side,
        qty=target["size"],
        order_type="Market",
        reduce_only=True,
        tif="IOC",
    )
|
||||
|
||||
async def set_leverage(
    self, category: str, symbol: str, leverage: int
) -> dict:
    """Set the same leverage for both sides of *symbol*."""
    resp = await self._run(
        self._http.set_leverage,
        category=category, symbol=symbol,
        # The API takes leverage as strings, one per side.
        buyLeverage=str(leverage), sellLeverage=str(leverage),
    )
    return self._envelope(resp, {
        "symbol": symbol, "leverage": leverage,
        "status": "leverage_set",
    })
|
||||
|
||||
async def switch_position_mode(
    self, category: str, symbol: str, mode: str
) -> dict:
    """Switch between hedge and one-way position mode.

    Any *mode* other than "hedge" maps to one-way. Codes 3 (hedge) /
    0 (one-way) follow this code's mapping — confirm against the Bybit
    switch-position-mode reference.
    """
    mode_code = 3 if mode.lower() == "hedge" else 0
    resp = await self._run(
        self._http.switch_position_mode,
        category=category, symbol=symbol, mode=mode_code,
    )
    return self._envelope(resp, {
        "symbol": symbol, "mode": mode,
        "status": "mode_switched",
    })
|
||||
|
||||
async def transfer_asset(
    self,
    coin: str,
    amount: float,
    from_type: str,
    to_type: str,
) -> dict:
    """Internal transfer between account types (e.g. FUND -> UNIFIED)."""
    import uuid
    resp = await self._run(
        self._http.create_internal_transfer,
        # A fresh UUID makes each transfer request idempotent on the API side.
        transferId=str(uuid.uuid4()),
        coin=coin,
        amount=str(amount),
        fromAccountType=from_type,
        toAccountType=to_type,
    )
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "transfer_id": r.get("transferId"),
        "coin": coin,
        "amount": amount,
        "status": "submitted",
    })
|
||||
@@ -0,0 +1,363 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import Depends, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel
|
||||
|
||||
from mcp_bybit.client import BybitClient
|
||||
|
||||
|
||||
# --- Body models: reads ---
|
||||
|
||||
class TickerReq(BaseModel):
    """Body for /tools/get_ticker."""
    symbol: str
    category: str = "linear"
|
||||
|
||||
|
||||
class TickerBatchReq(BaseModel):
    """Body for /tools/get_ticker_batch."""
    symbols: list[str]
    category: str = "linear"
|
||||
|
||||
|
||||
class OrderbookReq(BaseModel):
    """Body for /tools/get_orderbook."""
    symbol: str
    category: str = "linear"
    limit: int = 50
|
||||
|
||||
|
||||
class HistoricalReq(BaseModel):
    """Body for /tools/get_historical (OHLCV klines)."""
    symbol: str
    category: str = "linear"
    interval: str = "60"
    start: int | None = None
    end: int | None = None
    limit: int = 1000
|
||||
|
||||
|
||||
class IndicatorsReq(BaseModel):
    """Body for /tools/get_indicators."""
    symbol: str
    category: str = "linear"
    # NOTE(review): mutable default is safe here — Pydantic copies field
    # defaults per instance, unlike plain function defaults.
    indicators: list[str] = ["rsi", "atr", "macd", "adx"]
    interval: str = "60"
    start: int | None = None
    end: int | None = None
|
||||
|
||||
|
||||
class FundingRateReq(BaseModel):
    """Body for /tools/get_funding_rate."""
    symbol: str
    category: str = "linear"
|
||||
|
||||
|
||||
class FundingHistoryReq(BaseModel):
    """Body for /tools/get_funding_history."""
    symbol: str
    category: str = "linear"
    limit: int = 100
|
||||
|
||||
|
||||
class OpenInterestReq(BaseModel):
    """Body for /tools/get_open_interest."""
    symbol: str
    category: str = "linear"
    interval: str = "5min"
    limit: int = 288
|
||||
|
||||
|
||||
class InstrumentsReq(BaseModel):
    """Body for /tools/get_instruments."""
    category: str = "linear"
    symbol: str | None = None
|
||||
|
||||
|
||||
class OptionChainReq(BaseModel):
    """Body for /tools/get_option_chain."""
    base_coin: str
    expiry: str | None = None
|
||||
|
||||
|
||||
class PositionsReq(BaseModel):
    """Body for /tools/get_positions."""
    category: str = "linear"
|
||||
|
||||
|
||||
class AccountSummaryReq(BaseModel):
    """Body for /tools/get_account_summary (no fields; empty JSON object)."""
    pass
|
||||
|
||||
|
||||
class TradeHistoryReq(BaseModel):
    """Body for /tools/get_trade_history."""
    category: str = "linear"
    limit: int = 50
|
||||
|
||||
|
||||
class OpenOrdersReq(BaseModel):
    """Body for /tools/get_open_orders."""
    category: str = "linear"
    symbol: str | None = None
|
||||
|
||||
|
||||
class BasisSpotPerpReq(BaseModel):
    """Body for /tools/get_basis_spot_perp."""
    asset: str
|
||||
|
||||
|
||||
# --- Body models: writes ---
|
||||
|
||||
class PlaceOrderReq(BaseModel):
    """Body for /tools/place_order (write; CORE capability required)."""
    category: str
    symbol: str
    side: str
    qty: float
    order_type: str = "Limit"
    price: float | None = None
    tif: str = "GTC"
    reduce_only: bool = False
    position_idx: int | None = None
|
||||
|
||||
|
||||
class AmendOrderReq(BaseModel):
    """Body for /tools/amend_order (write)."""
    category: str
    symbol: str
    order_id: str
    new_qty: float | None = None
    new_price: float | None = None
|
||||
|
||||
|
||||
class CancelOrderReq(BaseModel):
    """Body for /tools/cancel_order (write)."""
    category: str
    symbol: str
    order_id: str
|
||||
|
||||
|
||||
class CancelAllReq(BaseModel):
    """Body for /tools/cancel_all_orders (write)."""
    category: str
    symbol: str | None = None
|
||||
|
||||
|
||||
class SetStopLossReq(BaseModel):
    """Body for /tools/set_stop_loss (write)."""
    category: str
    symbol: str
    stop_loss: float
    position_idx: int = 0
|
||||
|
||||
|
||||
class SetTakeProfitReq(BaseModel):
    """Body for /tools/set_take_profit (write)."""
    category: str
    symbol: str
    take_profit: float
    position_idx: int = 0
|
||||
|
||||
|
||||
class ClosePositionReq(BaseModel):
    """Body for /tools/close_position (write)."""
    category: str
    symbol: str
|
||||
|
||||
|
||||
class SetLeverageReq(BaseModel):
    """Body for /tools/set_leverage (write)."""
    category: str
    symbol: str
    leverage: int
|
||||
|
||||
|
||||
class SwitchModeReq(BaseModel):
    """Body for /tools/switch_position_mode (write)."""
    category: str
    symbol: str
    mode: str
|
||||
|
||||
|
||||
class TransferReq(BaseModel):
    """Body for /tools/transfer_asset (write)."""
    coin: str
    amount: float
    from_type: str
    to_type: str
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Raise HTTP 403 unless *principal* holds one of the allowed capabilities."""
    allowed = {
        name for name, enabled in (("core", core), ("observer", observer)) if enabled
    }
    # Disjoint capability sets mean the caller has no allowed role.
    if principal.capabilities.isdisjoint(allowed):
        raise HTTPException(status_code=403, detail="forbidden")
|
||||
|
||||
|
||||
def create_app(*, client: BybitClient, token_store: TokenStore):
|
||||
app = build_app(name="mcp-bybit", version="0.1.0", token_store=token_store)
|
||||
|
||||
# ── Reads ──────────────────────────────────────────────
|
||||
|
||||
@app.post("/tools/get_ticker", tags=["reads"])
|
||||
async def t_get_ticker(body: TickerReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_ticker(body.symbol, body.category)
|
||||
|
||||
@app.post("/tools/get_ticker_batch", tags=["reads"])
|
||||
async def t_get_ticker_batch(body: TickerBatchReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_ticker_batch(body.symbols, body.category)
|
||||
|
||||
@app.post("/tools/get_orderbook", tags=["reads"])
|
||||
async def t_get_orderbook(body: OrderbookReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_orderbook(body.symbol, body.category, body.limit)
|
||||
|
||||
@app.post("/tools/get_historical", tags=["reads"])
|
||||
async def t_get_historical(body: HistoricalReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_historical(
|
||||
body.symbol, body.category, body.interval, body.start, body.end, body.limit,
|
||||
)
|
||||
|
||||
@app.post("/tools/get_indicators", tags=["reads"])
|
||||
async def t_get_indicators(body: IndicatorsReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_indicators(
|
||||
body.symbol, body.category, body.indicators,
|
||||
body.interval, body.start, body.end,
|
||||
)
|
||||
|
||||
@app.post("/tools/get_funding_rate", tags=["reads"])
|
||||
async def t_get_funding_rate(body: FundingRateReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_funding_rate(body.symbol, body.category)
|
||||
|
||||
@app.post("/tools/get_funding_history", tags=["reads"])
|
||||
async def t_get_funding_history(body: FundingHistoryReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_funding_history(body.symbol, body.category, body.limit)
|
||||
|
||||
@app.post("/tools/get_open_interest", tags=["reads"])
|
||||
async def t_get_open_interest(body: OpenInterestReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_open_interest(body.symbol, body.category, body.interval, body.limit)
|
||||
|
||||
@app.post("/tools/get_instruments", tags=["reads"])
|
||||
async def t_get_instruments(body: InstrumentsReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_instruments(body.category, body.symbol)
|
||||
|
||||
@app.post("/tools/get_option_chain", tags=["reads"])
|
||||
async def t_get_option_chain(body: OptionChainReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_option_chain(body.base_coin, body.expiry)
|
||||
|
||||
@app.post("/tools/get_positions", tags=["reads"])
|
||||
async def t_get_positions(body: PositionsReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return {"positions": await client.get_positions(body.category)}
|
||||
|
||||
@app.post("/tools/get_account_summary", tags=["reads"])
|
||||
async def t_get_account_summary(body: AccountSummaryReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_account_summary()
|
||||
|
||||
@app.post("/tools/get_trade_history", tags=["reads"])
|
||||
async def t_get_trade_history(body: TradeHistoryReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return {"trades": await client.get_trade_history(body.category, body.limit)}
|
||||
|
||||
@app.post("/tools/get_open_orders", tags=["reads"])
|
||||
async def t_get_open_orders(body: OpenOrdersReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return {"orders": await client.get_open_orders(body.category, body.symbol)}
|
||||
|
||||
@app.post("/tools/get_basis_spot_perp", tags=["reads"])
|
||||
async def t_get_basis_spot_perp(body: BasisSpotPerpReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True, observer=True)
|
||||
return await client.get_basis_spot_perp(body.asset)
|
||||
|
||||
# ── Writes ─────────────────────────────────────────────
|
||||
|
||||
@app.post("/tools/place_order", tags=["writes"])
|
||||
async def t_place_order(body: PlaceOrderReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.place_order(
|
||||
body.category, body.symbol, body.side, body.qty,
|
||||
body.order_type, body.price, body.tif, body.reduce_only, body.position_idx,
|
||||
)
|
||||
|
||||
@app.post("/tools/amend_order", tags=["writes"])
|
||||
async def t_amend_order(body: AmendOrderReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.amend_order(
|
||||
body.category, body.symbol, body.order_id, body.new_qty, body.new_price,
|
||||
)
|
||||
|
||||
@app.post("/tools/cancel_order", tags=["writes"])
|
||||
async def t_cancel_order(body: CancelOrderReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.cancel_order(body.category, body.symbol, body.order_id)
|
||||
|
||||
@app.post("/tools/cancel_all_orders", tags=["writes"])
|
||||
async def t_cancel_all(body: CancelAllReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.cancel_all_orders(body.category, body.symbol)
|
||||
|
||||
@app.post("/tools/set_stop_loss", tags=["writes"])
|
||||
async def t_set_sl(body: SetStopLossReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.set_stop_loss(body.category, body.symbol, body.stop_loss, body.position_idx)
|
||||
|
||||
@app.post("/tools/set_take_profit", tags=["writes"])
|
||||
async def t_set_tp(body: SetTakeProfitReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.set_take_profit(body.category, body.symbol, body.take_profit, body.position_idx)
|
||||
|
||||
@app.post("/tools/close_position", tags=["writes"])
|
||||
async def t_close(body: ClosePositionReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.close_position(body.category, body.symbol)
|
||||
|
||||
@app.post("/tools/set_leverage", tags=["writes"])
|
||||
async def t_set_leverage(body: SetLeverageReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.set_leverage(body.category, body.symbol, body.leverage)
|
||||
|
||||
@app.post("/tools/switch_position_mode", tags=["writes"])
|
||||
async def t_switch_mode(body: SwitchModeReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.switch_position_mode(body.category, body.symbol, body.mode)
|
||||
|
||||
@app.post("/tools/transfer_asset", tags=["writes"])
|
||||
async def t_transfer(body: TransferReq, principal: Principal = Depends(require_principal)):
|
||||
_check(principal, core=True)
|
||||
return await client.transfer_asset(body.coin, body.amount, body.from_type, body.to_type)
|
||||
|
||||
# ── MCP mount ──────────────────────────────────────────
|
||||
|
||||
port = int(os.environ.get("PORT", "9019"))
|
||||
mount_mcp_endpoint(
|
||||
app,
|
||||
name="cerbero-bybit",
|
||||
version="0.1.0",
|
||||
token_store=token_store,
|
||||
internal_base_url=f"http://localhost:{port}",
|
||||
tools=[
|
||||
{"name": "get_ticker", "description": "Ticker Bybit (spot/linear/inverse/option)."},
|
||||
{"name": "get_ticker_batch", "description": "Ticker per più simboli."},
|
||||
{"name": "get_orderbook", "description": "Orderbook profondità N."},
|
||||
{"name": "get_historical", "description": "OHLCV candles Bybit."},
|
||||
{"name": "get_indicators", "description": "Indicatori tecnici (RSI, ATR, MACD, ADX)."},
|
||||
{"name": "get_funding_rate", "description": "Funding corrente perp."},
|
||||
{"name": "get_funding_history", "description": "Funding storico perp."},
|
||||
{"name": "get_open_interest", "description": "Open interest history perp."},
|
||||
{"name": "get_instruments", "description": "Specs contratti."},
|
||||
{"name": "get_option_chain", "description": "Option chain BTC/ETH/SOL."},
|
||||
{"name": "get_positions", "description": "Posizioni aperte."},
|
||||
{"name": "get_account_summary", "description": "Wallet balance e margine."},
|
||||
{"name": "get_trade_history", "description": "Fills recenti."},
|
||||
{"name": "get_open_orders", "description": "Ordini pending."},
|
||||
{"name": "get_basis_spot_perp", "description": "Basis spot vs linear perp."},
|
||||
{"name": "place_order", "description": "Invia ordine (CORE only)."},
|
||||
{"name": "amend_order", "description": "Modifica ordine esistente."},
|
||||
{"name": "cancel_order", "description": "Cancella ordine."},
|
||||
{"name": "cancel_all_orders", "description": "Cancella tutti ordini."},
|
||||
{"name": "set_stop_loss", "description": "Setta stop loss su posizione."},
|
||||
{"name": "set_take_profit", "description": "Setta take profit su posizione."},
|
||||
{"name": "close_position", "description": "Chiude posizione aperta."},
|
||||
{"name": "set_leverage", "description": "Leva buy+sell uniforme."},
|
||||
{"name": "switch_position_mode", "description": "Hedge vs one-way."},
|
||||
{"name": "transfer_asset", "description": "Trasferimento interno tra account types."},
|
||||
],
|
||||
)
|
||||
|
||||
return app
|
||||
@@ -0,0 +1,22 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from mcp_bybit.client import BybitClient
|
||||
|
||||
|
||||
@pytest.fixture
def mock_http():
    """Bare MagicMock standing in for the pybit HTTP session."""
    fake_session = MagicMock(name="pybit_HTTP")
    return fake_session
|
||||
|
||||
|
||||
@pytest.fixture
def client(mock_http):
    """BybitClient wired to the mocked pybit session, using testnet creds."""
    ctor_kwargs = {
        "api_key": "test_key",
        "api_secret": "test_secret",
        "testnet": True,
        "http": mock_http,
    }
    return BybitClient(**ctor_kwargs)
|
||||
@@ -0,0 +1,531 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from mcp_bybit.client import BybitClient
|
||||
|
||||
|
||||
def test_client_init_stores_attrs(client, mock_http):
    """Constructor must keep the testnet flag and the injected HTTP session."""
    assert client._http is mock_http
    assert client.testnet is True
|
||||
|
||||
|
||||
def test_client_init_default_http(monkeypatch):
    """Without an injected session, BybitClient builds its own pybit HTTP."""
    captured = {}

    class SpyHTTP:
        def __init__(self, **kwargs):
            captured.update(kwargs)

    monkeypatch.setattr("mcp_bybit.client.HTTP", SpyHTTP)
    BybitClient(api_key="k", api_secret="s", testnet=False)

    assert captured["api_key"] == "k"
    assert captured["api_secret"] == "s"
    assert captured["testnet"] is False
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_ticker(client, mock_http):
    """get_ticker normalizes the raw Bybit row into snake_case float fields."""
    raw_row = {
        "symbol": "BTCUSDT",
        "lastPrice": "60000",
        "markPrice": "60010",
        "bid1Price": "59995",
        "ask1Price": "60005",
        "volume24h": "1500.5",
        "turnover24h": "90000000",
        "fundingRate": "0.0001",
        "openInterest": "50000",
    }
    mock_http.get_tickers.return_value = {"retCode": 0, "result": {"list": [raw_row]}}

    ticker = await client.get_ticker("BTCUSDT", category="linear")

    mock_http.get_tickers.assert_called_once_with(category="linear", symbol="BTCUSDT")
    expected = {
        "symbol": "BTCUSDT",
        "last_price": 60000.0,
        "mark_price": 60010.0,
        "bid": 59995.0,
        "ask": 60005.0,
        "volume_24h": 1500.5,
        "funding_rate": 0.0001,
        "open_interest": 50000.0,
    }
    for key, value in expected.items():
        assert ticker[key] == value
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_ticker_batch(client, mock_http):
    """Batch lookup issues one get_tickers call per requested symbol."""

    def fake_tickers(**kwargs):
        row = {
            "symbol": kwargs["symbol"], "lastPrice": "1", "markPrice": "1",
            "bid1Price": "1", "ask1Price": "1", "volume24h": "0",
            "turnover24h": "0", "fundingRate": "0", "openInterest": "0",
        }
        return {"retCode": 0, "result": {"list": [row]}}

    mock_http.get_tickers.side_effect = fake_tickers

    batch = await client.get_ticker_batch(["BTCUSDT", "ETHUSDT"], category="linear")

    assert mock_http.get_tickers.call_count == 2
    assert set(batch) == {"BTCUSDT", "ETHUSDT"}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_ticker_not_found(client, mock_http):
    """An empty result list is reported as a not_found error payload."""
    mock_http.get_tickers.return_value = {"retCode": 0, "result": {"list": []}}
    result = await client.get_ticker("UNKNOWNUSDT", category="linear")
    assert result == {"symbol": "UNKNOWNUSDT", "error": "not_found"}
|
||||
|
||||
|
||||
def test_parse_helpers():
    """_f/_i parse numeric strings and map ''/None to None."""
    from mcp_bybit.client import _f, _i

    assert _f("1.5") == 1.5
    assert _i("42") == 42
    for blank in ("", None):
        assert _f(blank) is None
        assert _i(blank) is None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_orderbook(client, mock_http):
    """Orderbook levels come back as [price, qty] float pairs plus timestamp."""
    mock_http.get_orderbook.return_value = {
        "retCode": 0,
        "result": {
            "s": "BTCUSDT",
            "b": [["59990", "0.5"], ["59980", "1.0"]],
            "a": [["60010", "0.3"], ["60020", "0.7"]],
            "ts": 1700000000000,
        },
    }

    book = await client.get_orderbook("BTCUSDT", category="linear", limit=25)

    mock_http.get_orderbook.assert_called_once_with(
        category="linear", symbol="BTCUSDT", limit=25
    )
    assert book["symbol"] == "BTCUSDT"
    assert book["timestamp"] == 1700000000000
    assert book["bids"] == [[59990.0, 0.5], [59980.0, 1.0]]
    assert book["asks"] == [[60010.0, 0.3], [60020.0, 0.7]]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_historical(client, mock_http):
|
||||
mock_http.get_kline.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {
|
||||
"list": [
|
||||
["1700000000000", "60000", "60500", "59500", "60200", "100", "6020000"],
|
||||
["1700003600000", "60200", "60700", "60000", "60400", "80", "4832000"],
|
||||
]
|
||||
},
|
||||
}
|
||||
out = await client.get_historical(
|
||||
"BTCUSDT", category="linear", interval="60",
|
||||
start=1700000000000, end=1700003600000,
|
||||
)
|
||||
mock_http.get_kline.assert_called_once_with(
|
||||
category="linear", symbol="BTCUSDT", interval="60",
|
||||
start=1700000000000, end=1700003600000, limit=1000,
|
||||
)
|
||||
assert len(out["candles"]) == 2
|
||||
c0 = out["candles"][0]
|
||||
assert c0["timestamp"] == 1700000000000
|
||||
assert c0["open"] == 60000.0
|
||||
assert c0["high"] == 60500.0
|
||||
assert c0["low"] == 59500.0
|
||||
assert c0["close"] == 60200.0
|
||||
assert c0["volume"] == 100.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_indicators(client, mock_http):
|
||||
rows = [
|
||||
[str(1700000000000 + i * 3600_000),
|
||||
str(60000 + i * 10), str(60000 + i * 10 + 5),
|
||||
str(60000 + i * 10 - 5), str(60000 + i * 10 + 2),
|
||||
"100", "6000000"]
|
||||
for i in range(35)
|
||||
]
|
||||
mock_http.get_kline.return_value = {"retCode": 0, "result": {"list": rows}}
|
||||
out = await client.get_indicators(
|
||||
"BTCUSDT", category="linear",
|
||||
indicators=["rsi", "atr", "macd", "adx"],
|
||||
interval="60",
|
||||
)
|
||||
assert "rsi" in out and out["rsi"] is not None
|
||||
assert "atr" in out and out["atr"] is not None
|
||||
assert "macd" in out and out["macd"]["macd"] is not None
|
||||
assert "adx" in out and out["adx"]["adx"] is not None
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_funding_rate(client, mock_http):
|
||||
mock_http.get_tickers.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [{
|
||||
"symbol": "BTCUSDT", "fundingRate": "0.0001",
|
||||
"nextFundingTime": "1700003600000",
|
||||
"lastPrice": "60000", "markPrice": "60000",
|
||||
"bid1Price": "0", "ask1Price": "0",
|
||||
"volume24h": "0", "turnover24h": "0", "openInterest": "0",
|
||||
}]},
|
||||
}
|
||||
out = await client.get_funding_rate("BTCUSDT", category="linear")
|
||||
assert out["symbol"] == "BTCUSDT"
|
||||
assert out["funding_rate"] == 0.0001
|
||||
assert out["next_funding_time"] == 1700003600000
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_funding_history(client, mock_http):
|
||||
mock_http.get_funding_rate_history.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"symbol": "BTCUSDT", "fundingRate": "0.0001", "fundingRateTimestamp": "1700000000000"},
|
||||
{"symbol": "BTCUSDT", "fundingRate": "0.00008", "fundingRateTimestamp": "1699996400000"},
|
||||
]},
|
||||
}
|
||||
out = await client.get_funding_history("BTCUSDT", category="linear", limit=50)
|
||||
mock_http.get_funding_rate_history.assert_called_once_with(
|
||||
category="linear", symbol="BTCUSDT", limit=50
|
||||
)
|
||||
assert len(out["history"]) == 2
|
||||
assert out["history"][0]["rate"] == 0.0001
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_open_interest(client, mock_http):
|
||||
mock_http.get_open_interest.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"openInterest": "50000", "timestamp": "1700000000000"},
|
||||
{"openInterest": "49000", "timestamp": "1699996400000"},
|
||||
]},
|
||||
}
|
||||
out = await client.get_open_interest("BTCUSDT", category="linear", interval="5min", limit=100)
|
||||
mock_http.get_open_interest.assert_called_once_with(
|
||||
category="linear", symbol="BTCUSDT", intervalTime="5min", limit=100
|
||||
)
|
||||
assert len(out["points"]) == 2
|
||||
assert out["current_oi"] == 50000.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_instruments(client, mock_http):
|
||||
mock_http.get_instruments_info.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"symbol": "BTCUSDT", "status": "Trading", "baseCoin": "BTC",
|
||||
"quoteCoin": "USDT", "priceFilter": {"tickSize": "0.1"},
|
||||
"lotSizeFilter": {"qtyStep": "0.001", "minOrderQty": "0.001"}},
|
||||
]},
|
||||
}
|
||||
out = await client.get_instruments(category="linear")
|
||||
mock_http.get_instruments_info.assert_called_once_with(category="linear")
|
||||
assert len(out["instruments"]) == 1
|
||||
inst = out["instruments"][0]
|
||||
assert inst["symbol"] == "BTCUSDT"
|
||||
assert inst["tick_size"] == 0.1
|
||||
assert inst["qty_step"] == 0.001
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_option_chain(client, mock_http):
|
||||
mock_http.get_instruments_info.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"symbol": "BTC-30JUN25-50000-C", "baseCoin": "BTC",
|
||||
"settleCoin": "USDC", "optionsType": "Call",
|
||||
"launchTime": "1700000000000", "deliveryTime": "1719734400000"},
|
||||
{"symbol": "BTC-30JUN25-50000-P", "baseCoin": "BTC",
|
||||
"settleCoin": "USDC", "optionsType": "Put",
|
||||
"launchTime": "1700000000000", "deliveryTime": "1719734400000"},
|
||||
]},
|
||||
}
|
||||
out = await client.get_option_chain(base_coin="BTC")
|
||||
mock_http.get_instruments_info.assert_called_once_with(category="option", baseCoin="BTC")
|
||||
assert len(out["options"]) == 2
|
||||
assert out["options"][0]["type"] == "Call"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_positions(client, mock_http):
    """Positions are fetched per settle coin and normalized to snake_case."""
    mock_http.get_positions.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "side": "Buy", "size": "0.1",
             "avgPrice": "60000", "unrealisedPnl": "50",
             "leverage": "10", "liqPrice": "50000", "positionValue": "6000"},
        ]},
    }

    positions = await client.get_positions(category="linear")

    mock_http.get_positions.assert_called_once_with(category="linear", settleCoin="USDT")
    assert len(positions) == 1
    pos = positions[0]
    assert (pos["symbol"], pos["side"]) == ("BTCUSDT", "Buy")
    assert pos["size"] == 0.1
    assert pos["entry_price"] == 60000.0
    assert pos["liquidation_price"] == 50000.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_account_summary(client, mock_http):
|
||||
mock_http.get_wallet_balance.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [{
|
||||
"accountType": "UNIFIED",
|
||||
"totalEquity": "10000",
|
||||
"totalWalletBalance": "9500",
|
||||
"totalMarginBalance": "9800",
|
||||
"totalAvailableBalance": "9000",
|
||||
"totalPerpUPL": "200",
|
||||
"coin": [
|
||||
{"coin": "USDT", "walletBalance": "9500", "equity": "9700"}
|
||||
],
|
||||
}]},
|
||||
}
|
||||
out = await client.get_account_summary()
|
||||
mock_http.get_wallet_balance.assert_called_once_with(accountType="UNIFIED")
|
||||
assert out["equity"] == 10000.0
|
||||
assert out["available_balance"] == 9000.0
|
||||
assert out["unrealized_pnl"] == 200.0
|
||||
assert len(out["coins"]) == 1
|
||||
assert out["coins"][0]["coin"] == "USDT"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_trade_history(client, mock_http):
|
||||
mock_http.get_executions.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"symbol": "BTCUSDT", "side": "Buy", "execQty": "0.01",
|
||||
"execPrice": "60000", "execFee": "0.1",
|
||||
"execTime": "1700000000000", "orderId": "abc"},
|
||||
]},
|
||||
}
|
||||
out = await client.get_trade_history(category="linear", limit=50)
|
||||
mock_http.get_executions.assert_called_once_with(category="linear", limit=50)
|
||||
assert len(out) == 1
|
||||
assert out[0]["symbol"] == "BTCUSDT"
|
||||
assert out[0]["size"] == 0.01
|
||||
assert out[0]["price"] == 60000.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_open_orders(client, mock_http):
|
||||
mock_http.get_open_orders.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"list": [
|
||||
{"symbol": "BTCUSDT", "orderId": "o1", "side": "Buy",
|
||||
"qty": "0.1", "price": "59000", "orderType": "Limit",
|
||||
"orderStatus": "New", "reduceOnly": False},
|
||||
]},
|
||||
}
|
||||
out = await client.get_open_orders(category="linear")
|
||||
mock_http.get_open_orders.assert_called_once_with(category="linear", settleCoin="USDT")
|
||||
assert len(out) == 1
|
||||
assert out[0]["order_id"] == "o1"
|
||||
assert out[0]["price"] == 59000.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_basis_spot_perp(client, mock_http):
|
||||
def side(**kwargs):
|
||||
if kwargs["category"] == "spot":
|
||||
return {"retCode": 0, "result": {"list": [{
|
||||
"symbol": "BTCUSDT", "lastPrice": "60000", "markPrice": "60000",
|
||||
"bid1Price": "59995", "ask1Price": "60005",
|
||||
"volume24h": "0", "turnover24h": "0",
|
||||
"fundingRate": "0", "openInterest": "0",
|
||||
}]}}
|
||||
else:
|
||||
return {"retCode": 0, "result": {"list": [{
|
||||
"symbol": "BTCUSDT", "lastPrice": "60120", "markPrice": "60120",
|
||||
"bid1Price": "60115", "ask1Price": "60125",
|
||||
"volume24h": "0", "turnover24h": "0",
|
||||
"fundingRate": "0.0001", "openInterest": "0",
|
||||
}]}}
|
||||
mock_http.get_tickers.side_effect = side
|
||||
out = await client.get_basis_spot_perp("BTC")
|
||||
assert out["asset"] == "BTC"
|
||||
assert out["spot_price"] == 60000.0
|
||||
assert out["perp_price"] == 60120.0
|
||||
assert out["basis_abs"] == 120.0
|
||||
assert round(out["basis_pct"], 3) == 0.2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_place_order_limit(client, mock_http):
    """A limit order forwards stringified qty/price plus TIF to pybit."""
    mock_http.place_order.return_value = {
        "retCode": 0,
        "result": {"orderId": "ord123", "orderLinkId": ""},
    }

    result = await client.place_order(
        category="linear", symbol="BTCUSDT", side="Buy",
        qty=0.01, order_type="Limit", price=60000.0, tif="GTC",
    )

    assert result["order_id"] == "ord123"
    sent = mock_http.place_order.call_args.kwargs
    expected = {
        "category": "linear",
        "symbol": "BTCUSDT",
        "side": "Buy",
        "qty": "0.01",
        "orderType": "Limit",
        "price": "60000.0",
        "timeInForce": "GTC",
    }
    for key, value in expected.items():
        assert sent[key] == value
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_place_order_error(client, mock_http):
    """A non-zero retCode surfaces as an error payload, not an exception."""
    mock_http.place_order.return_value = {"retCode": 10001, "retMsg": "insufficient balance"}
    result = await client.place_order(
        category="linear", symbol="BTCUSDT", side="Buy", qty=0.01, order_type="Market"
    )
    assert result.get("code") == 10001
    assert result.get("error") == "insufficient balance"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_amend_order(client, mock_http):
    """Amending only the qty must not send a price field to pybit."""
    mock_http.amend_order.return_value = {"retCode": 0, "result": {"orderId": "ord1"}}

    result = await client.amend_order(
        category="linear", symbol="BTCUSDT", order_id="ord1", new_qty=0.02
    )

    assert result["order_id"] == "ord1"
    sent = mock_http.amend_order.call_args.kwargs
    assert sent["orderId"] == "ord1"
    assert sent["qty"] == "0.02"
    assert "price" not in sent
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_place_order_option_adds_link_id(client, mock_http):
|
||||
mock_http.place_order.return_value = {
|
||||
"retCode": 0,
|
||||
"result": {"orderId": "opt1", "orderLinkId": "cerbero-abc"},
|
||||
}
|
||||
await client.place_order(
|
||||
category="option", symbol="BTC-24APR26-96000-C-USDT",
|
||||
side="Buy", qty=0.01, order_type="Limit", price=5.0,
|
||||
)
|
||||
kwargs = mock_http.place_order.call_args.kwargs
|
||||
assert "orderLinkId" in kwargs
|
||||
assert kwargs["orderLinkId"].startswith("cerbero-")
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_place_order_linear_no_link_id(client, mock_http):
|
||||
mock_http.place_order.return_value = {"retCode": 0, "result": {"orderId": "x"}}
|
||||
await client.place_order(
|
||||
category="linear", symbol="BTCUSDT", side="Buy", qty=0.01, order_type="Market"
|
||||
)
|
||||
kwargs = mock_http.place_order.call_args.kwargs
|
||||
assert "orderLinkId" not in kwargs
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cancel_order(client, mock_http):
    """Cancelling maps order_id -> orderId and reports status 'cancelled'."""
    mock_http.cancel_order.return_value = {"retCode": 0, "result": {"orderId": "ord1"}}

    result = await client.cancel_order(category="linear", symbol="BTCUSDT", order_id="ord1")

    mock_http.cancel_order.assert_called_once_with(
        category="linear", symbol="BTCUSDT", orderId="ord1"
    )
    assert result["status"] == "cancelled"
    assert result["order_id"] == "ord1"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_cancel_all_orders(client, mock_http):
    """Bulk cancel returns the ids of every cancelled order."""
    mock_http.cancel_all_orders.return_value = {
        "retCode": 0,
        "result": {"list": [{"orderId": "o1"}, {"orderId": "o2"}]},
    }

    result = await client.cancel_all_orders(category="linear", symbol="BTCUSDT")

    mock_http.cancel_all_orders.assert_called_once_with(
        category="linear", symbol="BTCUSDT"
    )
    assert result["cancelled_ids"] == ["o1", "o2"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_set_stop_loss(client, mock_http):
    """Stop loss goes through set_trading_stop with a stringified price."""
    mock_http.set_trading_stop.return_value = {"retCode": 0, "result": {}}

    result = await client.set_stop_loss(
        category="linear", symbol="BTCUSDT", stop_loss=55000.0
    )

    mock_http.set_trading_stop.assert_called_once()
    sent = mock_http.set_trading_stop.call_args.kwargs
    assert sent["category"] == "linear"
    assert sent["symbol"] == "BTCUSDT"
    assert sent["stopLoss"] == "55000.0"
    assert sent.get("positionIdx", 0) == 0
    assert result["status"] == "stop_loss_set"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_set_take_profit(client, mock_http):
    """Take profit goes through set_trading_stop with a stringified price."""
    mock_http.set_trading_stop.return_value = {"retCode": 0, "result": {}}

    result = await client.set_take_profit(
        category="linear", symbol="BTCUSDT", take_profit=65000.0
    )

    sent = mock_http.set_trading_stop.call_args.kwargs
    assert sent["takeProfit"] == "65000.0"
    assert result["status"] == "take_profit_set"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_close_position(client, mock_http):
    """Closing a long submits a reduce-only market Sell for the full size."""
    mock_http.get_positions.return_value = {
        "retCode": 0, "result": {"list": [
            {"symbol": "BTCUSDT", "side": "Buy", "size": "0.1",
             "avgPrice": "60000", "unrealisedPnl": "0",
             "leverage": "10", "liqPrice": "0", "positionValue": "6000"},
        ]},
    }
    mock_http.place_order.return_value = {
        "retCode": 0, "result": {"orderId": "closeord", "orderLinkId": ""},
    }

    result = await client.close_position(category="linear", symbol="BTCUSDT")

    assert result["status"] == "submitted"
    sent = mock_http.place_order.call_args.kwargs
    assert sent["orderType"] == "Market"
    assert sent["side"] == "Sell"
    assert sent["qty"] == "0.1"
    assert sent["reduceOnly"] is True
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_set_leverage(client, mock_http):
    """Leverage is applied symmetrically to buy and sell sides."""
    mock_http.set_leverage.return_value = {"retCode": 0, "result": {}}

    result = await client.set_leverage(category="linear", symbol="BTCUSDT", leverage=5)

    mock_http.set_leverage.assert_called_once_with(
        category="linear", symbol="BTCUSDT", buyLeverage="5", sellLeverage="5"
    )
    assert result["status"] == "leverage_set"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_switch_position_mode(client, mock_http):
    """'hedge' is translated to Bybit position mode 3."""
    mock_http.switch_position_mode.return_value = {"retCode": 0, "result": {}}

    result = await client.switch_position_mode(
        category="linear", symbol="BTCUSDT", mode="hedge"
    )

    sent = mock_http.switch_position_mode.call_args.kwargs
    assert sent["mode"] == 3
    assert result["status"] == "mode_switched"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_transfer_asset(client, mock_http):
    """Internal transfer maps snake_case args to pybit's camelCase kwargs."""
    mock_http.create_internal_transfer.return_value = {
        "retCode": 0, "result": {"transferId": "tx123"},
    }

    result = await client.transfer_asset(
        coin="USDT", amount=100.0, from_type="UNIFIED", to_type="FUND"
    )

    assert result["transfer_id"] == "tx123"
    sent = mock_http.create_internal_transfer.call_args.kwargs
    expected = {
        "coin": "USDT",
        "amount": "100.0",
        "fromAccountType": "UNIFIED",
        "toAccountType": "FUND",
    }
    for key, value in expected.items():
        assert sent[key] == value
|
||||
@@ -0,0 +1,127 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
from mcp_bybit.server import create_app
|
||||
|
||||
|
||||
@pytest.fixture
def token_store():
    """Two principals: 'core' may write, 'observer' is read-only."""
    core = Principal("core", {"core"})
    observer = Principal("observer", {"observer"})
    return TokenStore(tokens={"core-tok": core, "obs-tok": observer})
|
||||
|
||||
|
||||
@pytest.fixture
def mock_client():
    """MagicMock BybitClient whose async methods return canned payloads."""
    canned = {
        "get_ticker": {"symbol": "BTCUSDT"},
        "get_ticker_batch": {"BTCUSDT": {}},
        "get_orderbook": {"bids": [], "asks": []},
        "get_historical": {"candles": []},
        "get_indicators": {"rsi": 50.0},
        "get_funding_rate": {"funding_rate": 0.0001},
        "get_funding_history": {"history": []},
        "get_open_interest": {"points": []},
        "get_instruments": {"instruments": []},
        "get_option_chain": {"options": []},
        "get_positions": [],
        "get_account_summary": {"equity": 0},
        "get_trade_history": [],
        "get_open_orders": [],
        "get_basis_spot_perp": {"basis_pct": 0},
        "place_order": {"order_id": "x"},
        "amend_order": {"order_id": "x"},
        "cancel_order": {"status": "cancelled"},
        "cancel_all_orders": {"cancelled_ids": []},
        "set_stop_loss": {"status": "stop_loss_set"},
        "set_take_profit": {"status": "take_profit_set"},
        "close_position": {"status": "submitted"},
        "set_leverage": {"status": "leverage_set"},
        "switch_position_mode": {"status": "mode_switched"},
        "transfer_asset": {"transfer_id": "tx"},
    }
    c = MagicMock()
    for method, payload in canned.items():
        setattr(c, method, AsyncMock(return_value=payload))
    return c
|
||||
|
||||
|
||||
@pytest.fixture
def http(mock_client, token_store):
    """Synchronous TestClient over an app wired with the mocked Bybit client."""
    return TestClient(create_app(client=mock_client, token_store=token_store))
|
||||
|
||||
|
||||
CORE = {"Authorization": "Bearer core-tok"}
|
||||
OBS = {"Authorization": "Bearer obs-tok"}
|
||||
|
||||
READ_ENDPOINTS = [
|
||||
("/tools/get_ticker", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_ticker_batch", {"symbols": ["BTCUSDT"]}),
|
||||
("/tools/get_orderbook", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_historical", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_indicators", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_funding_rate", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_funding_history", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_open_interest", {"symbol": "BTCUSDT"}),
|
||||
("/tools/get_instruments", {}),
|
||||
("/tools/get_option_chain", {"base_coin": "BTC"}),
|
||||
("/tools/get_positions", {}),
|
||||
("/tools/get_account_summary", {}),
|
||||
("/tools/get_trade_history", {}),
|
||||
("/tools/get_open_orders", {}),
|
||||
("/tools/get_basis_spot_perp", {"asset": "BTC"}),
|
||||
]
|
||||
|
||||
WRITE_ENDPOINTS = [
|
||||
("/tools/place_order", {"category": "linear", "symbol": "BTCUSDT", "side": "Buy", "qty": 0.01}),
|
||||
("/tools/amend_order", {"category": "linear", "symbol": "BTCUSDT", "order_id": "o1"}),
|
||||
("/tools/cancel_order", {"category": "linear", "symbol": "BTCUSDT", "order_id": "o1"}),
|
||||
("/tools/cancel_all_orders", {"category": "linear"}),
|
||||
("/tools/set_stop_loss", {"category": "linear", "symbol": "BTCUSDT", "stop_loss": 55000}),
|
||||
("/tools/set_take_profit", {"category": "linear", "symbol": "BTCUSDT", "take_profit": 65000}),
|
||||
("/tools/close_position", {"category": "linear", "symbol": "BTCUSDT"}),
|
||||
("/tools/set_leverage", {"category": "linear", "symbol": "BTCUSDT", "leverage": 5}),
|
||||
("/tools/switch_position_mode", {"category": "linear", "symbol": "BTCUSDT", "mode": "hedge"}),
|
||||
("/tools/transfer_asset", {"coin": "USDT", "amount": 10.0, "from_type": "UNIFIED", "to_type": "FUND"}),
|
||||
]
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_core_ok(http, path, payload):
    """A CORE principal may call every read endpoint."""
    resp = http.post(path, json=payload, headers=CORE)
    assert resp.status_code == 200, (path, resp.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_observer_ok(http, path, payload):
    """An observer principal may also call every read endpoint."""
    resp = http.post(path, json=payload, headers=OBS)
    assert resp.status_code == 200, (path, resp.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_no_auth_401(http, path, payload):
    """Missing bearer token is rejected with 401 on read endpoints."""
    resp = http.post(path, json=payload)
    assert resp.status_code == 401, (path, resp.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_core_ok(http, path, payload):
    """A CORE principal may call every write endpoint."""
    resp = http.post(path, json=payload, headers=CORE)
    assert resp.status_code == 200, (path, resp.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_observer_403(http, path, payload):
    """An observer principal is forbidden (403) from every write endpoint."""
    resp = http.post(path, json=payload, headers=OBS)
    assert resp.status_code == 403, (path, resp.text)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_no_auth_401(http, path, payload):
    """Missing bearer token is rejected with 401 on write endpoints."""
    resp = http.post(path, json=payload)
    assert resp.status_code == 401, (path, resp.text)
|
||||
@@ -0,0 +1,27 @@
|
||||
[project]
|
||||
name = "mcp-deribit"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_deribit"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-deribit = "mcp_deribit.__main__:main"
|
||||
@@ -0,0 +1,49 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
from option_mcp_common.env_validation import (
|
||||
fail_fast_if_missing,
|
||||
require_env,
|
||||
summarize,
|
||||
)
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_deribit.client import DeribitClient
|
||||
from mcp_deribit.server import create_app
|
||||
|
||||
configure_root_logging() # CER-P5-009: JSON default, env LOG_FORMAT=text per dev
|
||||
|
||||
|
||||
def main():
    """Boot the Deribit MCP service: validate env, build the client and app, serve.

    Required env: CREDENTIALS_FILE (path to a JSON file with client_id /
    client_secret and an optional testnet flag). Optional: CORE_TOKEN_FILE,
    OBSERVER_TOKEN_FILE, HOST, PORT.
    """
    # CER-P5-010: fail-fast boot on mandatory env vars
    fail_fast_if_missing(["CREDENTIALS_FILE"])
    summarize(["CREDENTIALS_FILE", "CORE_TOKEN_FILE", "OBSERVER_TOKEN_FILE", "PORT", "HOST"])
    creds_file = require_env("CREDENTIALS_FILE", "deribit credentials JSON path")
    # JSON is UTF-8 by spec; be explicit so boot does not depend on the locale.
    with open(creds_file, encoding="utf-8") as f:
        creds = json.load(f)

    client = DeribitClient(
        client_id=creds["client_id"],
        client_secret=creds["client_secret"],
        testnet=bool(creds.get("testnet", True)),  # default to testnet for safety
    )

    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(client=client, token_store=token_store)
    uvicorn.run(
        app,
        log_config=None,  # CER-P5-009: defer to the root JSON logger
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9011")),
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,569 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.risk_guard import (
|
||||
enforce_aggregate,
|
||||
enforce_leverage,
|
||||
enforce_single_notional,
|
||||
)
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel, field_validator, model_validator
|
||||
|
||||
from mcp_deribit.client import DeribitClient
|
||||
|
||||
# --- Body models ---
|
||||
|
||||
class GetTickerReq(BaseModel):
    """Request body for ``/tools/get_ticker``.

    Accepts either ``instrument_name`` or the legacy alias ``instrument``;
    after validation ``instrument_name`` is always populated.
    """

    instrument_name: str | None = None
    instrument: str | None = None

    model_config = {"extra": "allow"}

    @model_validator(mode="after")
    def _normalize(self):
        # Prefer the canonical field, fall back to the alias.
        symbol = self.instrument_name if self.instrument_name else self.instrument
        if not symbol:
            raise ValueError("instrument_name (or instrument) is required")
        self.instrument_name = symbol
        return self
|
||||
|
||||
|
||||
class GetTickerBatchReq(BaseModel):
    """Request body for ``/tools/get_ticker_batch``.

    Accepts either ``instrument_names`` or the legacy alias ``instruments``;
    after validation ``instrument_names`` is always populated.
    """

    instrument_names: list[str] | None = None
    instruments: list[str] | None = None

    model_config = {"extra": "allow"}

    @model_validator(mode="after")
    def _normalize(self):
        # Prefer the canonical field, fall back to the alias.
        symbols = self.instrument_names if self.instrument_names else self.instruments
        if not symbols:
            raise ValueError("instrument_names (or instruments) is required")
        self.instrument_names = symbols
        return self
|
||||
|
||||
|
||||
class GetInstrumentsReq(BaseModel):
    """Request body for ``/tools/get_instruments`` (filters + pagination)."""

    currency: str
    kind: str | None = None  # e.g. "option"; None = no kind filter
    expiry_from: str | None = None  # date string — presumably ISO, confirm with client
    expiry_to: str | None = None
    strike_min: float | None = None
    strike_max: float | None = None
    min_open_interest: float | None = None
    limit: int = 100
    offset: int = 0


class GetOrderbookReq(BaseModel):
    """Request body for ``/tools/get_orderbook``."""

    instrument_name: str
    depth: int = 10


class GetPositionsReq(BaseModel):
    """Request body for ``/tools/get_positions``."""

    currency: str = "USDC"


class GetAccountSummaryReq(BaseModel):
    """Request body for ``/tools/get_account_summary``."""

    currency: str = "USDC"


class GetTradeHistoryReq(BaseModel):
    """Request body for ``/tools/get_trade_history``."""

    limit: int = 100
    instrument_name: str | None = None  # None = all instruments


class GetHistoricalReq(BaseModel):
    """Request body for ``/tools/get_historical`` (OHLCV candles)."""

    instrument: str
    start_date: str
    end_date: str
    resolution: str = "1h"


class GetDvolReq(BaseModel):
    """Request body for ``/tools/get_dvol`` (DVOL index candles)."""

    currency: str = "BTC"
    start_date: str
    end_date: str
    resolution: str = "1D"


class GetDvolHistoryReq(BaseModel):
    """Request body for ``/tools/get_dvol_history``."""

    currency: str = "BTC"
    lookback_days: int = 90


class GetIvRankReq(BaseModel):
    """Request body for ``/tools/get_iv_rank``."""

    instrument: str


class GetRealizedVolReq(BaseModel):
    """Request body for ``/tools/get_realized_vol``."""

    currency: str = "BTC"
    # Mutable default is safe here: pydantic deep-copies field defaults
    # per instance (unlike plain function defaults).
    windows: list[int] = [14, 30]


class GetGexReq(BaseModel):
    """Request body for ``/tools/get_gex`` (gamma exposure)."""

    currency: str
    expiry_from: str | None = None
    expiry_to: str | None = None
    top_n_strikes: int = 50


class GetPcRatioReq(BaseModel):
    """Request body for ``/tools/get_pc_ratio``."""

    currency: str


class GetSkew25dReq(BaseModel):
    """Request body for ``/tools/get_skew_25d``."""

    currency: str
    expiry: str


class GetTermStructureReq(BaseModel):
    """Request body for ``/tools/get_term_structure``."""

    currency: str


class CalculateSpreadPayoffReq(BaseModel):
    """Request body for ``/tools/calculate_spread_payoff``."""

    legs: list[dict]  # free-form leg dicts — schema defined by the client
    quote_currency: str = "USD"


class RunBacktestReq(BaseModel):
    """Request body for ``/tools/run_backtest``."""

    strategy_name: str
    underlying: str = "BTC"
    lookback_days: int = 30
    resolution: str = "4h"
    entry_rules: dict | None = None
    exit_rules: dict | None = None


class FindByDeltaReq(BaseModel):
    """Request body for ``/tools/find_by_delta`` (delta-targeted strike search)."""

    currency: str
    expiry: str
    target_delta: float
    option_type: str
    max_results: int = 3
    # Liquidity floor defaults used to filter thin strikes.
    min_open_interest: float = 100.0
    min_volume_24h: float = 20.0
|
||||
|
||||
|
||||
class GetIndicatorsReq(BaseModel):
    """Request body for ``/tools/get_technical_indicators``.

    ``indicators`` may be given as a real list, a JSON array string, or a
    comma-separated string; all forms are coerced to ``list[str]``.
    """

    instrument: str
    indicators: list[str]
    start_date: str
    end_date: str
    resolution: str = "1h"

    @field_validator("indicators", mode="before")
    @classmethod
    def _coerce_indicators(cls, v):
        # A real list passes through untouched.
        if isinstance(v, list):
            return v
        if isinstance(v, str):
            import json

            text = v.strip()
            # JSON-array string: '["rsi","atr"]' → parse and clean entries.
            if text.startswith("["):
                try:
                    parsed = json.loads(text)
                except json.JSONDecodeError:
                    parsed = None
                if isinstance(parsed, list):
                    return [str(item).strip() for item in parsed if str(item).strip()]
            # Otherwise treat it as comma-separated: 'rsi,atr,macd'.
            return [item.strip() for item in text.split(",") if item.strip()]
        raise ValueError(
            "indicators must be a list like ['rsi','atr','macd'] "
            "or a comma-separated string like 'rsi,atr,macd'"
        )
|
||||
|
||||
|
||||
class PlaceOrderReq(BaseModel):
    """Request body for ``/tools/place_order`` (CORE only)."""

    instrument_name: str
    side: str  # "buy" | "sell"
    amount: float
    type: str = "limit"
    price: float | None = None  # required by the exchange for limit orders — not validated here
    reduce_only: bool = False  # reduce-only orders skip the notional guards (see t_place_order)
    post_only: bool = False
    label: str | None = None
    leverage: int | None = None  # CER-016: None → default cap (3x)


class CancelOrderReq(BaseModel):
    """Request body for ``/tools/cancel_order``."""

    order_id: str


class SetStopLossReq(BaseModel):
    """Request body for ``/tools/set_stop_loss``."""

    order_id: str
    stop_price: float


class SetTakeProfitReq(BaseModel):
    """Request body for ``/tools/set_take_profit``."""

    order_id: str
    tp_price: float


class ClosePositionReq(BaseModel):
    """Request body for ``/tools/close_position``."""

    instrument_name: str
|
||||
|
||||
|
||||
# --- CER-016 notional helpers ---
|
||||
|
||||
async def _compute_notional_deribit(client: DeribitClient, body: PlaceOrderReq) -> float:
|
||||
"""Stima notional in USD per un ordine Deribit.
|
||||
|
||||
- Perp USDC: contract size = 1 USD → amount è già notional USD.
|
||||
- Options: amount è in base asset (BTC/ETH) → moltiplica per index price.
|
||||
- Altri perp BTC/ETH: amount in USD notional.
|
||||
"""
|
||||
name = body.instrument_name.upper()
|
||||
if name.endswith("-PERPETUAL"):
|
||||
return float(body.amount)
|
||||
ref_price: float | None = body.price
|
||||
if ref_price is None:
|
||||
try:
|
||||
tk = await client.get_ticker(body.instrument_name)
|
||||
ref_price = tk.get("mark_price") or tk.get("last_price")
|
||||
except Exception:
|
||||
ref_price = None
|
||||
if not ref_price:
|
||||
return float(body.amount)
|
||||
return float(body.amount) * float(ref_price)
|
||||
|
||||
|
||||
async def _current_aggregate_deribit(client: DeribitClient) -> float:
|
||||
"""Somma notional posizioni aperte su Deribit (USDC)."""
|
||||
try:
|
||||
positions = await client.get_positions("USDC")
|
||||
except Exception:
|
||||
return 0.0
|
||||
total = 0.0
|
||||
for p in positions or []:
|
||||
size = abs(float(p.get("size") or 0))
|
||||
name = str(p.get("instrument") or "").upper()
|
||||
if name.endswith("-PERPETUAL"):
|
||||
total += size
|
||||
else:
|
||||
mark = float(p.get("mark_price") or 0)
|
||||
total += size * mark
|
||||
return total
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
|
||||
allowed: set[str] = set()
|
||||
if core:
|
||||
allowed.add("core")
|
||||
if observer:
|
||||
allowed.add("observer")
|
||||
if not (principal.capabilities & allowed):
|
||||
raise HTTPException(403, f"capability required: {allowed}")
|
||||
|
||||
|
||||
# --- App factory ---
|
||||
|
||||
def create_app(*, client: DeribitClient, token_store: TokenStore) -> FastAPI:
    """Build the FastAPI app exposing the Deribit tool endpoints.

    Read tools are open to both ``core`` and ``observer`` principals; write
    tools (order placement/management) are ``core`` only.  CER-016 risk
    guards cap single-order notional, aggregate notional and leverage on
    ``place_order``.  An MCP bridge endpoint (/mcp) is mounted on top of
    the /tools/* routes.
    """
    from contextlib import asynccontextmanager

    # CER-016: pre-set 3x leverage on the main perps at boot (best-effort).
    @asynccontextmanager
    async def _lifespan(_app: FastAPI):
        cap = enforce_leverage(None)
        for inst in ("BTC-PERPETUAL", "ETH-PERPETUAL"):
            try:
                await client.set_leverage(inst, cap)
            except Exception:
                # Best-effort only: a failed pre-set must not block boot.
                pass
        yield

    app = build_app(
        name="mcp-deribit",
        version="0.1.0",
        token_store=token_store,
        lifespan=_lifespan,
    )

    # --- Read tools: core + observer ---

    @app.post("/tools/is_testnet", tags=["reads"])
    async def t_is_testnet(principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return client.is_testnet()

    @app.post("/tools/get_ticker", tags=["reads"])
    async def t_get_ticker(
        body: GetTickerReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_ticker(body.instrument_name)

    @app.post("/tools/get_ticker_batch", tags=["reads"])
    async def t_get_ticker_batch(
        body: GetTickerBatchReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_ticker_batch(body.instrument_names)

    @app.post("/tools/get_instruments", tags=["reads"])
    async def t_get_instruments(
        body: GetInstrumentsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_instruments(
            currency=body.currency,
            kind=body.kind,
            expiry_from=body.expiry_from,
            expiry_to=body.expiry_to,
            strike_min=body.strike_min,
            strike_max=body.strike_max,
            min_open_interest=body.min_open_interest,
            limit=body.limit,
            offset=body.offset,
        )

    @app.post("/tools/get_orderbook", tags=["reads"])
    async def t_get_orderbook(
        body: GetOrderbookReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_orderbook(body.instrument_name, body.depth)

    @app.post("/tools/get_positions", tags=["reads"])
    async def t_get_positions(
        body: GetPositionsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_positions(body.currency)

    @app.post("/tools/get_account_summary", tags=["reads"])
    async def t_get_account_summary(
        body: GetAccountSummaryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_account_summary(body.currency)

    @app.post("/tools/get_trade_history", tags=["reads"])
    async def t_get_trade_history(
        body: GetTradeHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_trade_history(body.limit, body.instrument_name)

    @app.post("/tools/get_historical", tags=["reads"])
    async def t_get_historical(
        body: GetHistoricalReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_historical(
            body.instrument, body.start_date, body.end_date, body.resolution
        )

    @app.post("/tools/get_dvol", tags=["reads"])
    async def t_get_dvol(
        body: GetDvolReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_dvol(
            body.currency, body.start_date, body.end_date, body.resolution
        )

    @app.post("/tools/get_gex", tags=["reads"])
    async def t_get_gex(
        body: GetGexReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_gex(
            body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
        )

    @app.post("/tools/get_pc_ratio", tags=["reads"])
    async def t_get_pc_ratio(
        body: GetPcRatioReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_pc_ratio(body.currency)

    @app.post("/tools/get_skew_25d", tags=["reads"])
    async def t_get_skew_25d(
        body: GetSkew25dReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_skew_25d(body.currency, body.expiry)

    @app.post("/tools/get_term_structure", tags=["reads"])
    async def t_get_term_structure(
        body: GetTermStructureReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_term_structure(body.currency)

    # NOTE(review): tagged "writes" but the ACL is read-level (core+observer)
    # and the client call is computational — confirm the tag is intentional.
    @app.post("/tools/run_backtest", tags=["writes"])
    async def t_run_backtest(
        body: RunBacktestReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.run_backtest(
            strategy_name=body.strategy_name,
            underlying=body.underlying,
            lookback_days=body.lookback_days,
            resolution=body.resolution,
            entry_rules=body.entry_rules,
            exit_rules=body.exit_rules,
        )

    # NOTE(review): "writes" tag with core+observer ACL — see run_backtest.
    @app.post("/tools/calculate_spread_payoff", tags=["writes"])
    async def t_calculate_spread_payoff(
        body: CalculateSpreadPayoffReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.calculate_spread_payoff(body.legs, body.quote_currency)

    # NOTE(review): "writes" tag with core+observer ACL — see run_backtest.
    @app.post("/tools/find_by_delta", tags=["writes"])
    async def t_find_by_delta(
        body: FindByDeltaReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.find_by_delta(
            currency=body.currency,
            expiry=body.expiry,
            target_delta=body.target_delta,
            option_type=body.option_type,
            max_results=body.max_results,
            min_open_interest=body.min_open_interest,
            min_volume_24h=body.min_volume_24h,
        )

    @app.post("/tools/get_iv_rank", tags=["reads"])
    async def t_get_iv_rank(
        body: GetIvRankReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_iv_rank(body.instrument)

    @app.post("/tools/get_dvol_history", tags=["reads"])
    async def t_get_dvol_history(
        body: GetDvolHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_dvol_history(body.currency, body.lookback_days)

    @app.post("/tools/get_realized_vol", tags=["reads"])
    async def t_get_realized_vol(
        body: GetRealizedVolReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_realized_vol(body.currency, body.windows)

    @app.post("/tools/get_technical_indicators", tags=["reads"])
    async def t_get_indicators(
        body: GetIndicatorsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_technical_indicators(
            body.instrument,
            body.indicators,
            body.start_date,
            body.end_date,
            body.resolution,
        )

    # --- Write tools: core only ---

    @app.post("/tools/place_order", tags=["writes"])
    async def t_place_order(
        body: PlaceOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        # CER-016 risk guards; reduce-only orders shrink exposure, so the
        # notional checks are skipped for them.
        lev = enforce_leverage(body.leverage)
        if not body.reduce_only:
            notional = await _compute_notional_deribit(client, body)
            enforce_single_notional(
                notional, exchange="deribit", instrument=body.instrument_name
            )
            agg = await _current_aggregate_deribit(client)
            enforce_aggregate(agg, notional)
        # Only touch leverage if the caller asked for a non-default value.
        if lev != enforce_leverage(None):
            try:
                await client.set_leverage(body.instrument_name, lev)
            except Exception:
                # Best-effort: leverage pre-set failure does not block the order.
                pass
        return await client.place_order(
            instrument_name=body.instrument_name,
            side=body.side,
            amount=body.amount,
            type=body.type,
            price=body.price,
            reduce_only=body.reduce_only,
            post_only=body.post_only,
            label=body.label,
        )

    @app.post("/tools/cancel_order", tags=["writes"])
    async def t_cancel_order(
        body: CancelOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.cancel_order(body.order_id)

    @app.post("/tools/set_stop_loss", tags=["writes"])
    async def t_set_sl(
        body: SetStopLossReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.set_stop_loss(body.order_id, body.stop_price)

    @app.post("/tools/set_take_profit", tags=["writes"])
    async def t_set_tp(
        body: SetTakeProfitReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.set_take_profit(body.order_id, body.tp_price)

    @app.post("/tools/close_position", tags=["writes"])
    async def t_close_position(
        body: ClosePositionReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.close_position(body.instrument_name)

    # ───── MCP endpoint (/mcp) — bridge to the /tools/* routes ─────
    port = int(os.environ.get("PORT", "9011"))
    mount_mcp_endpoint(
        app,
        name="cerbero-deribit",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "is_testnet", "description": "True se client Deribit è in modalità testnet."},
            {"name": "get_ticker", "description": "Ticker di un instrument Deribit."},
            {"name": "get_ticker_batch", "description": "Ticker per N instruments in parallelo (max 20)."},
            {"name": "get_instruments", "description": "Lista instruments per currency."},
            {"name": "get_orderbook", "description": "Orderbook L1/L2 per instrument."},
            {"name": "get_positions", "description": "Posizioni aperte."},
            {"name": "get_account_summary", "description": "Summary account (equity, balance)."},
            {"name": "get_trade_history", "description": "Storia trade recenti."},
            {"name": "get_historical", "description": "OHLCV storico."},
            {"name": "get_dvol", "description": "Deribit Volatility Index (DVOL) OHLC per currency (BTC/ETH)."},
            {"name": "get_dvol_history", "description": "DVOL time series + percentili su lookback_days."},
            {"name": "get_iv_rank", "description": "IV rank 30/90/365d di un instrument vs DVOL storico della currency."},
            {"name": "find_by_delta", "description": "Trova strike con delta più vicino a target, filtrato per liquidità (OI/vol)."},
            {"name": "calculate_spread_payoff", "description": "Payoff/greci/max P-L/break-even/fee per struttura multi-leg."},
            {"name": "run_backtest", "description": "Heuristic backtest RSI-based su storia OHLCV per threshold accept/marginal/reject."},
            {"name": "get_term_structure", "description": "IV ATM per ogni expiry disponibile, detect contango/backwardation."},
            {"name": "get_skew_25d", "description": "Skew 25-delta put/call IV + risk reversal + butterfly per expiry."},
            {"name": "get_pc_ratio", "description": "Put/Call ratio aggregato su OI e volume 24h."},
            {"name": "get_gex", "description": "Gamma exposure per strike + zero gamma level (top N strikes per OI)."},
            {"name": "get_technical_indicators", "description": "Indicatori tecnici (RSI, MACD, ATR, ADX)."},
            {"name": "get_realized_vol", "description": "Volatilità realizzata annualizzata (log-return std) BTC/ETH + spread IV−RV."},
            {"name": "place_order", "description": "Invia ordine (CORE only, testnet)."},
            {"name": "cancel_order", "description": "Cancella ordine."},
            {"name": "set_stop_loss", "description": "Setta stop loss su posizione."},
            {"name": "set_take_profit", "description": "Setta take profit su posizione."},
            {"name": "close_position", "description": "Chiude posizione aperta."},
        ],
    )

    return app
|
||||
@@ -0,0 +1,236 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
from mcp_deribit.client import DeribitClient
|
||||
from pytest_httpx import HTTPXMock
|
||||
|
||||
|
||||
@pytest.fixture
def client():
    # Dummy credentials; all HTTP traffic is intercepted by httpx_mock.
    return DeribitClient(client_id="cid", client_secret="csec", testnet=True)


# Canned OAuth response reused by tests hitting private endpoints.
AUTH_RESP = {"result": {"access_token": "tok", "expires_in": 3600}}


@pytest.mark.asyncio
async def test_get_ticker(httpx_mock: HTTPXMock, client: DeribitClient):
    # public endpoint — no auth needed
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/ticker"),
        json={
            "result": {
                "mark_price": 50000,
                "last_price": 49900,
                "best_bid_price": 49950,
                "best_ask_price": 50050,
                "instrument_name": "BTC-PERPETUAL",
                "stats": {"volume": 1234.5},
                "open_interest": 9999,
                "greeks": None,
                "mark_iv": None,
            }
        },
    )
    result = await client.get_ticker("BTC-PERPETUAL")
    assert result["mark_price"] == 50000
    assert result["bid"] == 49950
    assert result["ask"] == 50050
    # CER-003: perpetual returns conceptual greeks, not None
    assert result["greeks"] == {"delta": 1.0, "gamma": 0.0, "vega": 0.0, "theta": 0.0, "rho": 0.0}
    # CER-007: testnet flag present
    assert result["testnet"] is True


@pytest.mark.asyncio
async def test_get_ticker_option_preserves_greeks(httpx_mock: HTTPXMock, client: DeribitClient):
    """Options keep the exchange-provided greeks/IV untouched (cf. CER-003)."""
    real_greeks = {"delta": 0.42, "gamma": 0.001, "vega": 0.05, "theta": -0.02, "rho": 0.003}
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/ticker"),
        json={
            "result": {
                "mark_price": 2500,
                "last_price": 2500,
                "best_bid_price": 2490,
                "best_ask_price": 2510,
                "instrument_name": "BTC-30APR26-75000-C",
                "stats": {"volume": 5.0},
                "open_interest": 100,
                "greeks": real_greeks,
                "mark_iv": 62.5,
            }
        },
    )
    result = await client.get_ticker("BTC-30APR26-75000-C")
    assert result["greeks"] == real_greeks
    assert result["mark_iv"] == 62.5


def test_is_testnet(client: DeribitClient):
    info = client.is_testnet()
    assert info["testnet"] is True
    assert "test.deribit.com" in info["base_url"]


@pytest.mark.asyncio
async def test_get_instruments_pagination_and_filter(httpx_mock: HTTPXMock, client: DeribitClient):
    items = []
    for i, exp_ms in enumerate([1700000000000, 1776000000000, 1800000000000]):
        items.append({
            "instrument_name": f"BTC-inst-{i}",
            "strike": 50000 + i * 10000,
            "expiration_timestamp": exp_ms,
            "option_type": "call",
            "tick_size": 0.5,
            "min_trade_amount": 0.1,
            # CER-008: public/get_instruments does not include OI in production
        })
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_instruments"),
        json={"result": items},
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_book_summary_by_currency"),
        json={"result": [
            {"instrument_name": "BTC-inst-0", "open_interest": 100.0},
            {"instrument_name": "BTC-inst-1", "open_interest": 200.0},
            {"instrument_name": "BTC-inst-2", "open_interest": 300.0},
        ]},
    )
    result = await client.get_instruments(
        "BTC", kind="option", strike_min=55000, limit=1, offset=0
    )
    assert result["total"] == 2
    assert len(result["instruments"]) == 1
    assert result["has_more"] is True
    assert result["testnet"] is True
    assert result["instruments"][0]["strike"] >= 55000
    # CER-008: OI merged in from book_summary
    assert result["instruments"][0]["open_interest"] in (200.0, 300.0)


@pytest.mark.asyncio
async def test_get_instruments_min_oi_filter(httpx_mock: HTTPXMock, client: DeribitClient):
    """CER-008: min_open_interest filters server-side using book_summary."""
    items = [
        {"instrument_name": "BTC-low-OI", "strike": 60000, "expiration_timestamp": 1800000000000,
         "option_type": "call", "tick_size": 0.5, "min_trade_amount": 0.1},
        {"instrument_name": "BTC-high-OI", "strike": 60000, "expiration_timestamp": 1800000000000,
         "option_type": "call", "tick_size": 0.5, "min_trade_amount": 0.1},
    ]
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_instruments"),
        json={"result": items},
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_book_summary_by_currency"),
        json={"result": [
            {"instrument_name": "BTC-low-OI", "open_interest": 5.0},
            {"instrument_name": "BTC-high-OI", "open_interest": 500.0},
        ]},
    )
    result = await client.get_instruments("BTC", kind="option", min_open_interest=100)
    assert result["total"] == 1
    assert result["instruments"][0]["name"] == "BTC-high-OI"
    assert result["instruments"][0]["open_interest"] == 500.0


@pytest.mark.asyncio
async def test_get_account_summary(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/get_account_summary"),
        json={"result": {"equity": 1000.0, "balance": 900.0, "currency": "USDC",
                         "margin_balance": 800.0, "available_funds": 700.0,
                         "unrealized_pnl": 50.0, "total_pnl": 100.0}},
    )
    result = await client.get_account_summary("USDC")
    assert result["equity"] == 1000.0
    assert result["balance"] == 900.0


@pytest.mark.asyncio
async def test_place_order(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/buy"),
        json={"result": {"order": {"order_id": "abc", "amount": 10, "order_state": "open"}, "trades": []}},
    )
    result = await client.place_order(
        instrument_name="BTC-PERPETUAL",
        side="buy",
        amount=10,
        type="limit",
        price=50000,
    )
    assert result["order"]["order_id"] == "abc"


@pytest.mark.asyncio
async def test_get_positions(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/get_positions"),
        json={"result": [
            {
                "instrument_name": "BTC-PERPETUAL",
                "size": 100.0,
                "average_price": 48000.0,
                "mark_price": 50000.0,
                "floating_profit_loss": 200.0,
                "realized_profit_loss": 50.0,
                "leverage": 10,
            }
        ]},
    )
    result = await client.get_positions("USDC")
    assert len(result) == 1
    assert result[0]["instrument"] == "BTC-PERPETUAL"
    # Positive size is normalized to direction "long".
    assert result[0]["direction"] == "long"


@pytest.mark.asyncio
async def test_get_dvol(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_volatility_index_data"),
        json={
            "result": {
                # Candles are [ts_ms, open, high, low, close].
                "data": [
                    [1700000000000, 55.0, 58.0, 54.0, 57.0],
                    [1700086400000, 57.0, 60.0, 56.0, 59.5],
                ],
                "continuation": None,
            }
        },
    )
    # Lower-case currency is accepted and normalized to upper case.
    result = await client.get_dvol("btc", "2024-01-01", "2024-01-02", "1D")
    assert result["currency"] == "BTC"
    assert result["latest"] == 59.5
    assert len(result["candles"]) == 2
    assert result["candles"][0]["close"] == 57.0


@pytest.mark.asyncio
async def test_cancel_order(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/cancel"),
        json={"result": {"order_id": "abc123", "order_state": "cancelled"}},
    )
    result = await client.cancel_order("abc123")
    assert result["order_id"] == "abc123"
    assert result["state"] == "cancelled"
|
||||
@@ -0,0 +1,188 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from mcp_deribit.server import create_app
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
|
||||
@pytest.fixture
def mock_client():
    # Fully mocked DeribitClient: every awaited method returns canned data.
    c = MagicMock()
    c.get_ticker = AsyncMock(return_value={"mark_price": 50000})
    c.get_instruments = AsyncMock(return_value=[])
    c.get_orderbook = AsyncMock(return_value={"bids": [], "asks": []})
    c.get_positions = AsyncMock(return_value=[])
    c.get_account_summary = AsyncMock(return_value={"equity": 1000})
    c.get_trade_history = AsyncMock(return_value=[])
    c.get_historical = AsyncMock(return_value={"candles": []})
    c.get_technical_indicators = AsyncMock(return_value={"rsi": 55.0})
    c.place_order = AsyncMock(return_value={"order_id": "x"})
    c.cancel_order = AsyncMock(return_value={"order_id": "x", "state": "cancelled"})
    c.set_stop_loss = AsyncMock(return_value={"order_id": "x", "stop_price": 45000})
    c.set_take_profit = AsyncMock(return_value={"order_id": "x", "tp_price": 55000})
    c.close_position = AsyncMock(return_value={"closed": True})
    c.set_leverage = AsyncMock(return_value={"state": "ok"})
    return c


@pytest.fixture
def http(mock_client):
    # "ct" = core token, "ot" = observer token.
    store = TokenStore(tokens={
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    })
    app = create_app(client=mock_client, token_store=store)
    return TestClient(app)


def test_health(http):
    assert http.get("/health").status_code == 200


def test_get_ticker_core_ok(http):
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ct"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200
    assert r.json()["mark_price"] == 50000


def test_get_ticker_observer_ok(http):
    # Read tools are allowed for observer principals too.
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200


def test_get_ticker_no_auth_401(http):
    r = http.post("/tools/get_ticker", json={"instrument_name": "BTC-PERPETUAL"})
    assert r.status_code == 401


def test_get_ticker_alias_instrument_ok(http, mock_client):
    # Legacy "instrument" key is normalized to instrument_name.
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "ETH"},
    )
    assert r.status_code == 200
    mock_client.get_ticker.assert_awaited_with("ETH")


def test_place_order_core_ok(http):
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={"instrument_name": "BTC-PERPETUAL", "side": "buy", "amount": 10},
    )
    assert r.status_code == 200


def test_place_order_observer_forbidden(http):
    # Write tools are core-only.
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL", "side": "buy", "amount": 10},
    )
    assert r.status_code == 403


def test_place_order_notional_cap_enforced(http):
    """CER-016: reject if notional > CERBERO_MAX_NOTIONAL (default 200)."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "instrument_name": "ETH-PERPETUAL",
            "side": "buy",
            "amount": 335,  # USD — cap 200
        },
    )
    assert r.status_code == 403
    body = r.json()
    assert body["error"]["code"] == "HARD_PROHIBITION"


def test_place_order_leverage_cap_enforced(http):
    """CER-016: reject leverage > 3x."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "instrument_name": "BTC-PERPETUAL",
            "side": "buy",
            "amount": 50,
            "leverage": 50,
        },
    )
    assert r.status_code == 403
    body = r.json()
    assert body["error"]["code"] == "HARD_PROHIBITION"
|
||||
|
||||
|
||||
def test_place_order_reduce_only_skips_cap(http):
|
||||
"""CER-016: reduce_only orders bypassano cap notional (è close)."""
|
||||
r = http.post(
|
||||
"/tools/place_order",
|
||||
headers={"Authorization": "Bearer ct"},
|
||||
json={
|
||||
"instrument_name": "ETH-PERPETUAL",
|
||||
"side": "sell",
|
||||
"amount": 10000,
|
||||
"reduce_only": True,
|
||||
},
|
||||
)
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
def test_close_position_core_ok(http):
|
||||
r = http.post(
|
||||
"/tools/close_position",
|
||||
headers={"Authorization": "Bearer ct"},
|
||||
json={"instrument_name": "BTC-PERPETUAL"},
|
||||
)
|
||||
assert r.status_code == 200
|
||||
|
||||
|
||||
def test_close_position_observer_forbidden(http):
|
||||
r = http.post(
|
||||
"/tools/close_position",
|
||||
headers={"Authorization": "Bearer ot"},
|
||||
json={"instrument_name": "BTC-PERPETUAL"},
|
||||
)
|
||||
assert r.status_code == 403
|
||||
|
||||
|
||||
def test_cancel_order_observer_forbidden(http):
|
||||
r = http.post(
|
||||
"/tools/cancel_order",
|
||||
headers={"Authorization": "Bearer ot"},
|
||||
json={"order_id": "abc123"},
|
||||
)
|
||||
assert r.status_code == 403
|
||||
|
||||
|
||||
def test_set_stop_loss_observer_forbidden(http):
|
||||
r = http.post(
|
||||
"/tools/set_stop_loss",
|
||||
headers={"Authorization": "Bearer ot"},
|
||||
json={"order_id": "abc123", "stop_price": 45000.0},
|
||||
)
|
||||
assert r.status_code == 403
|
||||
|
||||
|
||||
def test_get_account_summary_observer_ok(http):
|
||||
r = http.post(
|
||||
"/tools/get_account_summary",
|
||||
headers={"Authorization": "Bearer ot"},
|
||||
json={"currency": "USDC"},
|
||||
)
|
||||
assert r.status_code == 200
|
||||
assert r.json()["equity"] == 1000
|
||||
@@ -0,0 +1,29 @@
|
||||
[project]
|
||||
name = "mcp-hyperliquid"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
"hyperliquid-python-sdk>=0.3",
|
||||
"eth-account>=0.11",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_hyperliquid"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-hyperliquid = "mcp_hyperliquid.__main__:main"
|
||||
@@ -0,0 +1,43 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_hyperliquid.client import HyperliquidClient
|
||||
from mcp_hyperliquid.server import create_app
|
||||
|
||||
|
||||
configure_root_logging() # CER-P5-009
|
||||
|
||||
def main():
    """Entrypoint: load wallet credentials, build the FastAPI app, serve it.

    Required env: HYPERLIQUID_WALLET_FILE — path to a JSON file with
    `wallet_address`, `private_key`, and optionally `testnet` /
    `api_wallet_address`.
    Optional env: CORE_TOKEN_FILE, OBSERVER_TOKEN_FILE, HOST, PORT.
    """
    wallet_file = os.environ["HYPERLIQUID_WALLET_FILE"]
    with open(wallet_file) as f:
        creds = json.load(f)

    client = HyperliquidClient(
        wallet_address=creds["wallet_address"],
        private_key=creds["private_key"],
        # Default to testnet so a missing flag cannot accidentally hit mainnet.
        testnet=bool(creds.get("testnet", True)),
        api_wallet_address=creds.get("api_wallet_address"),
    )

    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(client=client, token_store=token_store)
    uvicorn.run(
        app,
        log_config=None,  # CER-P5-009: delegate to the root JSON logger
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9012")),
    )


if __name__ == "__main__":
    main()
|
||||
@@ -0,0 +1,577 @@
|
||||
"""Hyperliquid REST API client for perpetual futures trading."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
import datetime as _dt
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
from option_mcp_common import indicators as ind
|
||||
|
||||
# REST base URLs for the public /info API (mainnet vs testnet).
BASE_LIVE = "https://api.hyperliquid.xyz"
BASE_TESTNET = "https://api.hyperliquid-testnet.xyz"

# Identity mapping today, kept as an explicit whitelist; unknown resolutions
# fall through unchanged (see get_historical).
RESOLUTION_MAP = {
    "1m": "1m",
    "5m": "5m",
    "15m": "15m",
    "1h": "1h",
    "4h": "4h",
    "1d": "1d",
}
|
||||
|
||||
# Optional dependency: write operations need the official SDK (EIP-712
# signing). Read operations work without it — see _get_exchange, which
# raises RuntimeError when the SDK is missing.
try:
    from eth_account import Account
    from hyperliquid.exchange import Exchange
    from hyperliquid.utils import constants as hl_constants

    _SDK_AVAILABLE = True
except ImportError:  # pragma: no cover
    _SDK_AVAILABLE = False
|
||||
|
||||
|
||||
def _to_ms(date_str: str) -> int:
|
||||
try:
|
||||
dt = _dt.datetime.fromisoformat(date_str)
|
||||
except ValueError:
|
||||
dt = _dt.datetime.strptime(date_str, "%Y-%m-%d")
|
||||
return int(dt.timestamp() * 1000)
|
||||
|
||||
|
||||
class HyperliquidClient:
    """Async client for the Hyperliquid API.

    Read operations use direct HTTP calls via httpx against /info.
    Write operations delegate to hyperliquid-python-sdk for EIP-712 signing.
    """

    def __init__(
        self,
        wallet_address: str,
        private_key: str,
        testnet: bool = True,
        api_wallet_address: str | None = None,
    ):
        # Main account address: used for /info queries and as the SDK's
        # account_address.
        self.wallet_address = wallet_address
        # Signing key — only handed to the SDK Exchange, never sent via /info.
        self.private_key = private_key
        self.testnet = testnet
        # Optional dedicated API wallet; falls back to the main wallet.
        self.api_wallet_address = api_wallet_address or wallet_address
        self.base_url = BASE_TESTNET if testnet else BASE_LIVE
        # SDK Exchange, constructed lazily by _get_exchange.
        self._exchange: Any | None = None
|
||||
|
||||
# ── SDK exchange (lazy) ────────────────────────────────────
|
||||
|
||||
    def _get_exchange(self) -> Any:
        """Return (and cache) an SDK Exchange instance for write ops.

        Raises:
            RuntimeError: when hyperliquid-python-sdk is not importable
                (see the _SDK_AVAILABLE guard at module import time).
        """
        if not _SDK_AVAILABLE:
            raise RuntimeError(
                "hyperliquid-python-sdk is not installed; write operations unavailable."
            )
        if self._exchange is None:
            # Build the EIP-712 signer from the raw private key.
            account = Account.from_key(self.private_key)
            base_url = (
                hl_constants.TESTNET_API_URL if self.testnet else hl_constants.MAINNET_API_URL
            )
            # Empty spot metadata — this client trades perps only; presumably
            # this avoids the SDK fetching spot meta at construction (confirm).
            empty_spot_meta: dict[str, Any] = {"universe": [], "tokens": []}
            self._exchange = Exchange(
                account,
                base_url,
                account_address=self.wallet_address,
                spot_meta=empty_spot_meta,
            )
        return self._exchange
|
||||
|
||||
# ── Internal helpers ───────────────────────────────────────
|
||||
|
||||
async def _post(self, payload: dict[str, Any]) -> Any:
|
||||
"""POST JSON to the /info endpoint."""
|
||||
async with httpx.AsyncClient(timeout=15) as http:
|
||||
resp = await http.post(f"{self.base_url}/info", json=payload)
|
||||
resp.raise_for_status()
|
||||
return resp.json()
|
||||
|
||||
@staticmethod
|
||||
async def _run_sync(func: Any, *args: Any, **kwargs: Any) -> Any:
|
||||
"""Run a synchronous SDK call in the default executor."""
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
|
||||
|
||||
# ── Read tools ─────────────────────────────────────────────
|
||||
|
||||
async def get_markets(self) -> list[dict[str, Any]]:
|
||||
"""List all perp markets with metadata and current stats."""
|
||||
data = await self._post({"type": "metaAndAssetCtxs"})
|
||||
universe = data[0]["universe"]
|
||||
ctx_list = data[1]
|
||||
markets = []
|
||||
for meta, ctx in zip(universe, ctx_list, strict=True):
|
||||
markets.append(
|
||||
{
|
||||
"asset": meta["name"],
|
||||
"mark_price": float(ctx.get("markPx", 0)),
|
||||
"funding_rate": float(ctx.get("funding", 0)),
|
||||
"open_interest": float(ctx.get("openInterest", 0)),
|
||||
"volume_24h": float(ctx.get("dayNtlVlm", 0)),
|
||||
"max_leverage": int(meta.get("maxLeverage", 1)),
|
||||
}
|
||||
)
|
||||
return markets
|
||||
|
||||
async def get_ticker(self, instrument: str) -> dict[str, Any]:
|
||||
"""Get ticker information for a specific asset."""
|
||||
markets = await self.get_markets()
|
||||
for m in markets:
|
||||
if m["asset"].upper() == instrument.upper():
|
||||
return {
|
||||
"asset": m["asset"],
|
||||
"mark_price": m["mark_price"],
|
||||
"mid_price": m["mark_price"],
|
||||
"funding_rate": m["funding_rate"],
|
||||
"open_interest": m["open_interest"],
|
||||
"volume_24h": m["volume_24h"],
|
||||
"premium": 0.0,
|
||||
}
|
||||
return {"error": f"Asset {instrument} not found"}
|
||||
|
||||
async def get_orderbook(self, instrument: str, depth: int = 10) -> dict[str, Any]:
|
||||
"""Get L2 order book for an asset."""
|
||||
data = await self._post({"type": "l2Book", "coin": instrument.upper()})
|
||||
levels = data.get("levels", [[], []])
|
||||
bids = [{"price": float(b["px"]), "size": float(b["sz"])} for b in levels[0][:depth]]
|
||||
asks = [{"price": float(a["px"]), "size": float(a["sz"])} for a in levels[1][:depth]]
|
||||
return {"asset": instrument, "bids": bids, "asks": asks}
|
||||
|
||||
async def get_positions(self) -> list[dict[str, Any]]:
|
||||
"""Get open positions for the wallet."""
|
||||
data = await self._post(
|
||||
{"type": "clearinghouseState", "user": self.wallet_address}
|
||||
)
|
||||
positions = []
|
||||
for ap in data.get("assetPositions", []):
|
||||
pos = ap.get("position", {})
|
||||
size = float(pos.get("szi", 0))
|
||||
if size == 0:
|
||||
continue
|
||||
leverage_data = pos.get("leverage", {})
|
||||
lev_value = (
|
||||
leverage_data.get("value", "1")
|
||||
if isinstance(leverage_data, dict)
|
||||
else str(leverage_data)
|
||||
)
|
||||
positions.append(
|
||||
{
|
||||
"asset": pos.get("coin", ""),
|
||||
"size": abs(size),
|
||||
"direction": "long" if size > 0 else "short",
|
||||
"entry_price": float(pos.get("entryPx", 0) or 0),
|
||||
"unrealized_pnl": float(pos.get("unrealizedPnl", 0)),
|
||||
"leverage": float(lev_value),
|
||||
"liquidation_price": float(pos.get("liquidationPx", 0) or 0),
|
||||
}
|
||||
)
|
||||
return positions
|
||||
|
||||
    async def get_account_summary(self) -> dict[str, Any]:
        """Get account summary (equity, balance, margin) including spot balances.

        With a Unified Account, spot USDC and perps share collateral.
        `spot_fetch_ok` / `perps_fetch_ok` indicate whether the two sides were
        read successfully: if either is False the caller should treat
        `equity`/`available_balance` as a lower bound.
        """
        perps_fetch_ok = True
        perps_equity = 0.0
        perps_available = 0.0
        margin_used = 0.0
        unrealized_pnl = 0.0
        try:
            data = await self._post(
                {"type": "clearinghouseState", "user": self.wallet_address}
            )
            margin = data.get("marginSummary") or {}
            perps_equity = float(margin.get("accountValue", 0) or 0)
            perps_available = float(margin.get("totalRawUsd", 0) or 0)
            margin_used = float(margin.get("totalMarginUsed", 0) or 0)
            # NOTE(review): "totalNtlPos" reads like total position notional,
            # not unrealized PnL — confirm against the Hyperliquid API docs.
            unrealized_pnl = float(margin.get("totalNtlPos", 0) or 0)
        except Exception:
            # Best-effort: keep zeros and flag the failure instead of raising.
            perps_fetch_ok = False

        spot_fetch_ok = True
        spot_usdc = 0.0
        try:
            spot_data = await self._post(
                {"type": "spotClearinghouseState", "user": self.wallet_address}
            )
            for b in spot_data.get("balances", []) or []:
                if b.get("coin") == "USDC":
                    spot_usdc = float(b.get("total", 0) or 0)
        except Exception:
            spot_fetch_ok = False

        # Unified account: both sides contribute to equity/available.
        total_equity = perps_equity + spot_usdc
        total_available = perps_available + spot_usdc
        return {
            "equity": total_equity,
            "perps_equity": perps_equity,
            "perps_available": perps_available,
            "spot_usdc": spot_usdc,
            "available_balance": total_available,
            "margin_used": margin_used,
            "unrealized_pnl": unrealized_pnl,
            "perps_fetch_ok": perps_fetch_ok,
            "spot_fetch_ok": spot_fetch_ok,
        }
|
||||
|
||||
async def get_trade_history(self, limit: int = 100) -> list[dict[str, Any]]:
|
||||
"""Get recent trade fills."""
|
||||
data = await self._post({"type": "userFills", "user": self.wallet_address})
|
||||
trades = []
|
||||
for t in data[:limit]:
|
||||
trades.append(
|
||||
{
|
||||
"asset": t.get("coin", ""),
|
||||
"side": t.get("side", ""),
|
||||
"size": float(t.get("sz", 0)),
|
||||
"price": float(t.get("px", 0)),
|
||||
"fee": float(t.get("fee", 0)),
|
||||
"timestamp": t.get("time", ""),
|
||||
}
|
||||
)
|
||||
return trades
|
||||
|
||||
async def get_historical(
|
||||
self, instrument: str, start_date: str, end_date: str, resolution: str = "1h"
|
||||
) -> dict[str, Any]:
|
||||
"""Get OHLCV candles for an asset."""
|
||||
start_ms = _to_ms(start_date)
|
||||
end_ms = _to_ms(end_date)
|
||||
interval = RESOLUTION_MAP.get(resolution, resolution)
|
||||
data = await self._post(
|
||||
{
|
||||
"type": "candleSnapshot",
|
||||
"req": {
|
||||
"coin": instrument.upper(),
|
||||
"interval": interval,
|
||||
"startTime": start_ms,
|
||||
"endTime": end_ms,
|
||||
},
|
||||
}
|
||||
)
|
||||
candles = []
|
||||
for c in data:
|
||||
candles.append(
|
||||
{
|
||||
"timestamp": c.get("t", 0),
|
||||
"open": float(c.get("o", 0)),
|
||||
"high": float(c.get("h", 0)),
|
||||
"low": float(c.get("l", 0)),
|
||||
"close": float(c.get("c", 0)),
|
||||
"volume": float(c.get("v", 0)),
|
||||
}
|
||||
)
|
||||
return {"candles": candles}
|
||||
|
||||
async def get_open_orders(self) -> list[dict[str, Any]]:
|
||||
"""Get all open orders for the wallet."""
|
||||
data = await self._post({"type": "openOrders", "user": self.wallet_address})
|
||||
orders = []
|
||||
for o in data:
|
||||
orders.append(
|
||||
{
|
||||
"oid": o.get("oid"),
|
||||
"asset": o.get("coin", ""),
|
||||
"side": o.get("side", ""),
|
||||
"size": float(o.get("sz", 0)),
|
||||
"price": float(o.get("limitPx", 0)),
|
||||
"order_type": o.get("orderType", ""),
|
||||
}
|
||||
)
|
||||
return orders
|
||||
|
||||
    async def basis_spot_perp(self, asset: str) -> dict[str, Any]:
        """Compare a spot reference price with the HL perp mark price.

        Returns the basis (absolute and %), the current hourly funding rate,
        a funding-only annualization, and a `carry_opportunity` flag set when
        annualized funding > 5% and hourly funding > 0.0001. When either
        price is unavailable, returns a partial dict with an "error" key.
        """
        asset = asset.upper()
        # Spot reference price from Coinbase (mainnet reference, even when HL is testnet)
        spot_price: float | None = None
        spot_source = "coinbase"
        try:
            async with httpx.AsyncClient(timeout=8) as c:
                resp = await c.get(f"https://api.coinbase.com/v2/prices/{asset}-USD/spot")
                if resp.status_code == 200:
                    spot_price = float(resp.json().get("data", {}).get("amount"))
        except Exception:
            spot_price = None
        if spot_price is None:
            # Fallback to Kraken; "c" is presumably the last-trade
            # [price, volume] pair — verify against the Kraken API docs.
            try:
                async with httpx.AsyncClient(timeout=8) as c:
                    resp = await c.get(
                        "https://api.kraken.com/0/public/Ticker", params={"pair": f"{asset}USD"}
                    )
                    if resp.status_code == 200:
                        res = resp.json().get("result") or {}
                        first = next(iter(res.values()), {})
                        price = (first.get("c") or [None])[0]
                        spot_price = float(price) if price else None
                        spot_source = "kraken"
            except Exception:
                pass

        # Perp price + funding from HL
        try:
            ctx = await self._post({"type": "metaAndAssetCtxs"})
            universe = ctx[0]["universe"]
            ctx_list = ctx[1]
            perp_price = None
            funding = None
            for meta, c in zip(universe, ctx_list, strict=True):
                if meta["name"].upper() == asset:
                    perp_price = float(c.get("markPx", 0))
                    funding = float(c.get("funding", 0))
                    break
        except Exception:
            perp_price = None
            funding = None

        if spot_price is None or perp_price is None:
            return {
                "asset": asset,
                "spot_price": spot_price,
                "perp_price": perp_price,
                "error": "missing spot or perp price",
            }

        basis_abs = perp_price - spot_price
        basis_pct = round(basis_abs / spot_price * 100, 4)
        # Hourly funding annualized (24 * 365), expressed in percent.
        basis_ann_funding = (
            round(funding * 24 * 365 * 100, 2) if funding is not None else None
        )
        carry_opp = bool(
            basis_ann_funding is not None
            and basis_ann_funding > 5
            and (funding or 0) > 0.0001
        )

        return {
            "asset": asset,
            "spot_price": spot_price,
            "spot_source": spot_source,
            "perp_price": perp_price,
            "basis_absolute": round(basis_abs, 4),
            "basis_pct": basis_pct,
            "current_funding_hourly": funding,
            "basis_annualized_funding_only": basis_ann_funding,
            "carry_opportunity": carry_opp,
            "testnet": self.testnet,
            "data_timestamp": _dt.datetime.now(_dt.UTC).isoformat(),
        }
|
||||
|
||||
async def get_funding_rate(self, instrument: str) -> dict[str, Any]:
|
||||
"""Get current and recent historical funding rates for an asset."""
|
||||
data = await self._post({"type": "metaAndAssetCtxs"})
|
||||
universe = data[0]["universe"]
|
||||
ctx_list = data[1]
|
||||
current_rate = None
|
||||
for meta, ctx in zip(universe, ctx_list, strict=True):
|
||||
if meta["name"].upper() == instrument.upper():
|
||||
current_rate = float(ctx.get("funding", 0))
|
||||
break
|
||||
if current_rate is None:
|
||||
return {"error": f"Asset {instrument} not found"}
|
||||
|
||||
# Fetch funding history (last 7 days)
|
||||
end_ms = int(_dt.datetime.utcnow().timestamp() * 1000)
|
||||
start_ms = end_ms - 7 * 24 * 3600 * 1000
|
||||
history_data = await self._post(
|
||||
{
|
||||
"type": "fundingHistory",
|
||||
"coin": instrument.upper(),
|
||||
"startTime": start_ms,
|
||||
"endTime": end_ms,
|
||||
}
|
||||
)
|
||||
history = []
|
||||
for entry in history_data:
|
||||
history.append(
|
||||
{
|
||||
"timestamp": entry.get("time", 0),
|
||||
"funding_rate": float(entry.get("fundingRate", 0)),
|
||||
}
|
||||
)
|
||||
return {
|
||||
"asset": instrument,
|
||||
"current_funding_rate": current_rate,
|
||||
"history": history,
|
||||
}
|
||||
|
||||
async def get_indicators(
|
||||
self,
|
||||
instrument: str,
|
||||
indicators: list[str],
|
||||
start_date: str,
|
||||
end_date: str,
|
||||
resolution: str = "1h",
|
||||
) -> dict[str, Any]:
|
||||
"""Compute technical indicators over OHLCV data."""
|
||||
historical = await self.get_historical(instrument, start_date, end_date, resolution)
|
||||
candles = historical.get("candles", [])
|
||||
closes = [c["close"] for c in candles]
|
||||
highs = [c["high"] for c in candles]
|
||||
lows = [c["low"] for c in candles]
|
||||
|
||||
result: dict[str, Any] = {}
|
||||
for indicator in indicators:
|
||||
name = indicator.lower()
|
||||
if name == "sma":
|
||||
result["sma"] = ind.sma(closes, 20)
|
||||
elif name == "rsi":
|
||||
result["rsi"] = ind.rsi(closes)
|
||||
elif name == "atr":
|
||||
result["atr"] = ind.atr(highs, lows, closes)
|
||||
elif name == "macd":
|
||||
result["macd"] = ind.macd(closes)
|
||||
elif name == "adx":
|
||||
result["adx"] = ind.adx(highs, lows, closes)
|
||||
else:
|
||||
result[name] = None
|
||||
return result
|
||||
|
||||
# ── Write tools (via SDK) ──────────────────────────────────
|
||||
|
||||
async def place_order(
|
||||
self,
|
||||
instrument: str,
|
||||
side: str,
|
||||
amount: float,
|
||||
type: str = "limit",
|
||||
price: float | None = None,
|
||||
reduce_only: bool = False,
|
||||
) -> dict[str, Any]:
|
||||
"""Place an order on Hyperliquid using the SDK for EIP-712 signing."""
|
||||
exchange = self._get_exchange()
|
||||
is_buy = side.lower() in ("buy", "long")
|
||||
coin = instrument.upper()
|
||||
|
||||
if type == "market":
|
||||
ot: dict[str, Any] = {"limit": {"tif": "Ioc"}}
|
||||
if price is None:
|
||||
ticker = await self.get_ticker(coin)
|
||||
mark = ticker.get("mark_price", 0)
|
||||
price = round(mark * 1.03, 1) if is_buy else round(mark * 0.97, 1)
|
||||
elif type in ("stop_market", "stop_loss"):
|
||||
ot = {"trigger": {"triggerPx": float(price), "isMarket": True, "tpsl": "sl"}}
|
||||
elif type == "take_profit":
|
||||
ot = {"trigger": {"triggerPx": float(price), "isMarket": True, "tpsl": "tp"}}
|
||||
else:
|
||||
ot = {"limit": {"tif": "Gtc"}}
|
||||
|
||||
if price is None:
|
||||
return {"error": "price is required for limit orders"}
|
||||
|
||||
result = await self._run_sync(
|
||||
exchange.order, coin, is_buy, amount, price, ot, reduce_only
|
||||
)
|
||||
|
||||
status = result.get("status", "unknown")
|
||||
response = result.get("response", {})
|
||||
if isinstance(response, str):
|
||||
return {
|
||||
"status": status,
|
||||
"error": response,
|
||||
"order_id": "",
|
||||
"filled_size": 0,
|
||||
"avg_fill_price": 0,
|
||||
}
|
||||
|
||||
statuses = response.get("data", {}).get("statuses", [{}])
|
||||
first = statuses[0] if statuses else {}
|
||||
if isinstance(first, str):
|
||||
return {
|
||||
"status": status,
|
||||
"error": first,
|
||||
"order_id": "",
|
||||
"filled_size": 0,
|
||||
"avg_fill_price": 0,
|
||||
}
|
||||
return {
|
||||
"order_id": first.get("resting", {}).get(
|
||||
"oid", first.get("filled", {}).get("oid", "")
|
||||
),
|
||||
"status": status,
|
||||
"filled_size": float(first.get("filled", {}).get("totalSz", 0)),
|
||||
"avg_fill_price": float(first.get("filled", {}).get("avgPx", 0)),
|
||||
}
|
||||
|
||||
async def cancel_order(self, order_id: str, instrument: str) -> dict[str, Any]:
|
||||
"""Cancel an existing order using the SDK."""
|
||||
exchange = self._get_exchange()
|
||||
result = await self._run_sync(
|
||||
exchange.cancel, instrument.upper(), int(order_id)
|
||||
)
|
||||
status = result.get("status", "unknown")
|
||||
response = result.get("response", "")
|
||||
if isinstance(response, str) and status == "err":
|
||||
return {"order_id": order_id, "status": status, "error": response}
|
||||
return {"order_id": order_id, "status": status}
|
||||
|
||||
async def set_stop_loss(
|
||||
self, instrument: str, stop_price: float, size: float
|
||||
) -> dict[str, Any]:
|
||||
"""Set a stop-loss trigger order."""
|
||||
# Determine direction by checking open position
|
||||
positions = await self.get_positions()
|
||||
direction = "sell" # default: assume long
|
||||
for pos in positions:
|
||||
if pos["asset"].upper() == instrument.upper():
|
||||
direction = "sell" if pos["direction"] == "long" else "buy"
|
||||
if size == 0:
|
||||
size = pos["size"]
|
||||
break
|
||||
return await self.place_order(
|
||||
instrument=instrument,
|
||||
side=direction,
|
||||
amount=size,
|
||||
type="stop_loss",
|
||||
price=stop_price,
|
||||
reduce_only=True,
|
||||
)
|
||||
|
||||
async def set_take_profit(
|
||||
self, instrument: str, tp_price: float, size: float
|
||||
) -> dict[str, Any]:
|
||||
"""Set a take-profit trigger order."""
|
||||
positions = await self.get_positions()
|
||||
direction = "sell" # default: assume long
|
||||
for pos in positions:
|
||||
if pos["asset"].upper() == instrument.upper():
|
||||
direction = "sell" if pos["direction"] == "long" else "buy"
|
||||
if size == 0:
|
||||
size = pos["size"]
|
||||
break
|
||||
return await self.place_order(
|
||||
instrument=instrument,
|
||||
side=direction,
|
||||
amount=size,
|
||||
type="take_profit",
|
||||
price=tp_price,
|
||||
reduce_only=True,
|
||||
)
|
||||
|
||||
async def close_position(self, instrument: str) -> dict[str, Any]:
|
||||
"""Close an open position for the given asset using market_close."""
|
||||
exchange = self._get_exchange()
|
||||
try:
|
||||
result = await self._run_sync(exchange.market_close, instrument.upper())
|
||||
return {
|
||||
"status": result.get("status", "unknown"),
|
||||
"asset": instrument,
|
||||
}
|
||||
except Exception as exc:
|
||||
return {"error": str(exc), "asset": instrument}
|
||||
|
||||
async def health(self) -> dict[str, Any]:
|
||||
"""Health check — ping /info for server status."""
|
||||
try:
|
||||
await self._post({"type": "meta"})
|
||||
return {"status": "ok", "testnet": self.testnet}
|
||||
except Exception as exc:
|
||||
return {"status": "error", "error": str(exc)}
|
||||
@@ -0,0 +1,383 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.risk_guard import (
|
||||
enforce_aggregate,
|
||||
enforce_leverage,
|
||||
enforce_single_notional,
|
||||
)
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel, field_validator, model_validator
|
||||
|
||||
from mcp_hyperliquid.client import HyperliquidClient
|
||||
|
||||
# --- Body models ---
|
||||
|
||||
class GetMarketsReq(BaseModel):
    # No parameters: get_markets lists every perp market.
    pass
|
||||
|
||||
|
||||
class GetTickerReq(BaseModel):
    # Asset symbol to look up.
    instrument: str
|
||||
|
||||
|
||||
class GetOrderbookReq(BaseModel):
    # Asset symbol to query.
    instrument: str
    # Number of book levels per side to return.
    depth: int = 10
|
||||
|
||||
|
||||
class GetPositionsReq(BaseModel):
    # No parameters: positions are listed for the configured wallet.
    pass
|
||||
|
||||
|
||||
class GetAccountSummaryReq(BaseModel):
    # No parameters: summary covers the configured wallet.
    pass
|
||||
|
||||
|
||||
class GetTradeHistoryReq(BaseModel):
    # Maximum number of fills to return.
    limit: int = 100
|
||||
|
||||
|
||||
class GetHistoricalReq(BaseModel):
    # Canonical field is `instrument`; `asset` and `interval` are accepted
    # aliases, normalized in _normalize. Extra fields are tolerated.
    instrument: str | None = None
    asset: str | None = None
    start_date: str | None = None
    end_date: str | None = None
    resolution: str = "1h"
    interval: str | None = None
    limit: int = 50

    model_config = {"extra": "allow"}

    @model_validator(mode="after")
    def _normalize(self):
        """Resolve aliases and default the date window to the recent past."""
        from datetime import UTC, datetime, timedelta
        sym = self.instrument or self.asset
        if not sym:
            raise ValueError("instrument (or asset) is required")
        self.instrument = sym
        if self.interval:
            self.resolution = self.interval
        if not self.end_date:
            self.end_date = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
        if not self.start_date:
            # Window sized as limit // 6 days (min 1) — presumably ~6 candles
            # per day at the common resolutions; TODO confirm intent.
            days = max(1, self.limit // 6)
            self.start_date = (
                datetime.now(UTC) - timedelta(days=days)
            ).strftime("%Y-%m-%dT%H:%M:%S")
        return self
|
||||
|
||||
|
||||
class GetOpenOrdersReq(BaseModel):
    # No parameters: open orders are listed for the configured wallet.
    pass
|
||||
|
||||
|
||||
class GetFundingRateReq(BaseModel):
    # Asset symbol whose funding rate is requested.
    instrument: str
|
||||
|
||||
|
||||
class BasisSpotPerpReq(BaseModel):
    # Asset symbol for the spot-vs-perp basis computation.
    asset: str
|
||||
|
||||
|
||||
class GetIndicatorsReq(BaseModel):
    # Canonical field is `instrument`; `asset` and `interval` are accepted
    # aliases, normalized in _normalize. Extra fields are tolerated.
    instrument: str | None = None
    asset: str | None = None
    # Indicator names; a JSON array string or comma-separated string is
    # coerced by _coerce_indicators. (Pydantic copies this default per model.)
    indicators: list[str] = ["rsi", "atr", "macd", "adx"]
    start_date: str | None = None
    end_date: str | None = None
    resolution: str = "1h"
    interval: str | None = None
    limit: int = 50

    model_config = {"extra": "allow"}

    @model_validator(mode="after")
    def _normalize(self):
        """Resolve aliases and default the date window to the recent past."""
        from datetime import UTC, datetime, timedelta
        sym = self.instrument or self.asset
        if not sym:
            raise ValueError("instrument (or asset) is required")
        self.instrument = sym
        if self.interval:
            self.resolution = self.interval
        if not self.end_date:
            self.end_date = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
        if not self.start_date:
            # Minimum 2 days here (vs 1 for plain history) — presumably so
            # indicators have warm-up candles; TODO confirm limit // 6 sizing.
            days = max(2, self.limit // 6)
            self.start_date = (
                datetime.now(UTC) - timedelta(days=days)
            ).strftime("%Y-%m-%dT%H:%M:%S")
        return self

    @field_validator("indicators", mode="before")
    @classmethod
    def _coerce_indicators(cls, v):
        """Accept a list, a JSON array string, or a comma-separated string."""
        if isinstance(v, str):
            import json
            s = v.strip()
            if s.startswith("["):
                try:
                    parsed = json.loads(s)
                    if isinstance(parsed, list):
                        return [str(x).strip() for x in parsed if str(x).strip()]
                except json.JSONDecodeError:
                    # Fall back to comma-splitting the raw string.
                    pass
            return [x.strip() for x in s.split(",") if x.strip()]
        if isinstance(v, list):
            return v
        raise ValueError(
            "indicators must be a list like ['rsi','atr','macd'] "
            "or a comma-separated string like 'rsi,atr,macd'"
        )
|
||||
|
||||
|
||||
class PlaceOrderReq(BaseModel):
    instrument: str
    side: str  # "buy" | "sell"
    # Base-asset quantity (notional is derived as amount * price downstream).
    amount: float
    type: str = "limit"
    price: float | None = None
    reduce_only: bool = False
    leverage: int | None = None  # CER-016: None → default cap (3x)
|
||||
|
||||
|
||||
class CancelOrderReq(BaseModel):
    # Exchange order id (numeric string on Hyperliquid).
    order_id: str
    instrument: str
|
||||
|
||||
|
||||
class SetStopLossReq(BaseModel):
    instrument: str
    stop_price: float
    # 0 means: use the full open position size (see client.set_stop_loss).
    size: float
|
||||
|
||||
|
||||
class SetTakeProfitReq(BaseModel):
    instrument: str
    tp_price: float
    # 0 means: use the full open position size (see client.set_take_profit).
    size: float
|
||||
|
||||
|
||||
class ClosePositionReq(BaseModel):
    # Asset whose open position should be market-closed.
    instrument: str
|
||||
|
||||
|
||||
# --- CER-016 notional helpers ---
|
||||
|
||||
async def _compute_notional_hl(client: HyperliquidClient, body: PlaceOrderReq) -> float:
|
||||
"""HL perp: amount è in base asset → notional = amount * price."""
|
||||
ref_price: float | None = body.price
|
||||
if ref_price is None:
|
||||
try:
|
||||
tk = await client.get_ticker(body.instrument)
|
||||
ref_price = tk.get("mark_price") or tk.get("last_price") or tk.get("price")
|
||||
except Exception:
|
||||
ref_price = None
|
||||
if not ref_price:
|
||||
return 0.0
|
||||
return float(body.amount) * float(ref_price)
|
||||
|
||||
|
||||
async def _current_aggregate_hl(client: HyperliquidClient) -> float:
|
||||
try:
|
||||
positions = await client.get_positions()
|
||||
except Exception:
|
||||
return 0.0
|
||||
total = 0.0
|
||||
for p in positions or []:
|
||||
size = abs(float(p.get("size") or p.get("amount") or 0))
|
||||
mark = float(p.get("mark_price") or p.get("price") or 0)
|
||||
total += size * mark
|
||||
return total
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
|
||||
allowed: set[str] = set()
|
||||
if core:
|
||||
allowed.add("core")
|
||||
if observer:
|
||||
allowed.add("observer")
|
||||
if not (principal.capabilities & allowed):
|
||||
raise HTTPException(403, f"capability required: {allowed}")
|
||||
|
||||
|
||||
# --- App factory ---
|
||||
|
||||
def create_app(*, client: HyperliquidClient, token_store: TokenStore) -> FastAPI:
    """Build the mcp-hyperliquid FastAPI app.

    Read tools are open to both ``core`` and ``observer`` principals; write
    tools are ``core``-only and enforce the CER-016 leverage / notional caps
    before touching the exchange. The /mcp endpoint bridges MCP tool calls
    to the /tools/* routes.
    """
    app = build_app(name="mcp-hyperliquid", version="0.1.0", token_store=token_store)

    # --- Read tools: core + observer ---

    @app.post("/tools/get_markets", tags=["reads"])
    async def t_get_markets(
        body: GetMarketsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_markets()

    @app.post("/tools/get_ticker", tags=["reads"])
    async def t_get_ticker(
        body: GetTickerReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_ticker(body.instrument)

    @app.post("/tools/get_orderbook", tags=["reads"])
    async def t_get_orderbook(
        body: GetOrderbookReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_orderbook(body.instrument, body.depth)

    @app.post("/tools/get_positions", tags=["reads"])
    async def t_get_positions(
        body: GetPositionsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_positions()

    @app.post("/tools/get_account_summary", tags=["reads"])
    async def t_get_account_summary(
        body: GetAccountSummaryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_account_summary()

    @app.post("/tools/get_trade_history", tags=["reads"])
    async def t_get_trade_history(
        body: GetTradeHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_trade_history(body.limit)

    @app.post("/tools/get_historical", tags=["reads"])
    async def t_get_historical(
        body: GetHistoricalReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_historical(
            body.instrument, body.start_date, body.end_date, body.resolution
        )

    @app.post("/tools/get_open_orders", tags=["reads"])
    async def t_get_open_orders(
        body: GetOpenOrdersReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_open_orders()

    @app.post("/tools/get_funding_rate", tags=["reads"])
    async def t_get_funding_rate(
        body: GetFundingRateReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_funding_rate(body.instrument)

    # FIX: this is a read tool (observer-allowed, grouped with the reads) but
    # was tagged "writes", mislabelling it in the OpenAPI schema.
    @app.post("/tools/basis_spot_perp", tags=["reads"])
    async def t_basis_spot_perp(
        body: BasisSpotPerpReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.basis_spot_perp(body.asset)

    @app.post("/tools/get_indicators", tags=["reads"])
    async def t_get_indicators(
        body: GetIndicatorsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_indicators(
            body.instrument,
            body.indicators,
            body.start_date,
            body.end_date,
            body.resolution,
        )

    # --- Write tools: core only ---

    @app.post("/tools/place_order", tags=["writes"])
    async def t_place_order(
        body: PlaceOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        enforce_leverage(body.leverage)
        # CER-016: notional caps apply only to risk-increasing orders;
        # reduce-only orders can never grow exposure.
        if not body.reduce_only:
            notional = await _compute_notional_hl(client, body)
            enforce_single_notional(
                notional, exchange="hyperliquid", instrument=body.instrument
            )
            agg = await _current_aggregate_hl(client)
            enforce_aggregate(agg, notional)
        return await client.place_order(
            instrument=body.instrument,
            side=body.side,
            amount=body.amount,
            type=body.type,
            price=body.price,
            reduce_only=body.reduce_only,
        )

    @app.post("/tools/cancel_order", tags=["writes"])
    async def t_cancel_order(
        body: CancelOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.cancel_order(body.order_id, body.instrument)

    @app.post("/tools/set_stop_loss", tags=["writes"])
    async def t_set_sl(
        body: SetStopLossReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.set_stop_loss(body.instrument, body.stop_price, body.size)

    @app.post("/tools/set_take_profit", tags=["writes"])
    async def t_set_tp(
        body: SetTakeProfitReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.set_take_profit(body.instrument, body.tp_price, body.size)

    @app.post("/tools/close_position", tags=["writes"])
    async def t_close_position(
        body: ClosePositionReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        return await client.close_position(body.instrument)

    # ───── MCP endpoint (/mcp) — bridge to the /tools/* routes ─────
    port = int(os.environ.get("PORT", "9012"))
    mount_mcp_endpoint(
        app,
        name="cerbero-hyperliquid",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_markets", "description": "Lista mercati perp disponibili."},
            {"name": "get_ticker", "description": "Ticker di un perp."},
            {"name": "get_orderbook", "description": "Orderbook L2."},
            {"name": "get_positions", "description": "Posizioni aperte."},
            {"name": "get_account_summary", "description": "Account summary (spot + perp equity)."},
            {"name": "get_trade_history", "description": "Storia trade."},
            {"name": "get_historical", "description": "OHLCV storico."},
            {"name": "get_open_orders", "description": "Ordini aperti."},
            {"name": "get_funding_rate", "description": "Funding rate corrente per simbolo."},
            {"name": "basis_spot_perp", "description": "Basis spot-perp annualizzato + carry opportunity detection."},
            {"name": "get_indicators", "description": "Indicatori tecnici."},
            {"name": "place_order", "description": "Invia ordine (CORE only)."},
            {"name": "cancel_order", "description": "Cancella ordine."},
            {"name": "set_stop_loss", "description": "Stop loss su posizione."},
            {"name": "set_take_profit", "description": "Take profit su posizione."},
            {"name": "close_position", "description": "Chiude posizione."},
        ],
    )

    return app
|
||||
@@ -0,0 +1,227 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import re
|
||||
|
||||
import pytest
|
||||
from mcp_hyperliquid.client import HyperliquidClient
|
||||
from pytest_httpx import HTTPXMock
|
||||
|
||||
|
||||
@pytest.fixture
def client():
    """Testnet HyperliquidClient built from a throwaway wallet and key."""
    dummy_key = "0x" + "a" * 64
    return HyperliquidClient(
        wallet_address="0xDeadBeef",
        private_key=dummy_key,
        testnet=True,
    )
|
||||
|
||||
|
||||
# Shared mock responses

# [meta, asset contexts] pair: universe entries align positionally with the
# context rows (BTC ↔ first ctx row, ETH ↔ second).
META_AND_CTX = [
    {
        "universe": [
            {"name": "BTC", "maxLeverage": 50},
            {"name": "ETH", "maxLeverage": 25},
        ]
    },
    [
        {
            "markPx": "50000.0",
            "funding": "0.0001",
            "openInterest": "1000.0",
            "dayNtlVlm": "500000.0",
        },
        {
            "markPx": "3000.0",
            "funding": "0.00005",
            "openInterest": "500.0",
            "dayNtlVlm": "200000.0",
        },
    ],
]

# Perp clearinghouse snapshot: margin summary plus one long 0.1 BTC position.
CLEARINGHOUSE_STATE = {
    "marginSummary": {
        "accountValue": "1500.0",
        "totalRawUsd": "1200.0",
        "totalMarginUsed": "300.0",
        "totalNtlPos": "50.0",
    },
    "assetPositions": [
        {
            "position": {
                "coin": "BTC",
                "szi": "0.1",  # positive size → long
                "entryPx": "48000.0",
                "unrealizedPnl": "200.0",
                "leverage": {"value": "10"},
                "liquidationPx": "40000.0",
            }
        }
    ],
}

# Spot balances: 500 USDC (combined with the 1500 perp equity → 2000 total).
SPOT_STATE = {"balances": [{"coin": "USDC", "total": "500.0"}]}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_get_markets(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """get_markets normalizes meta + ctx rows into market dicts."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    markets = await client.get_markets()
    assert len(markets) == 2
    assert markets[0]["asset"] == "BTC"
    assert markets[0]["mark_price"] == 50000.0
    assert markets[0]["max_leverage"] == 50


@pytest.mark.asyncio
async def test_get_ticker(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """get_ticker returns the normalized row for a known asset."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    result = await client.get_ticker("BTC")
    assert result["asset"] == "BTC"
    assert result["mark_price"] == 50000.0
    assert result["funding_rate"] == 0.0001


@pytest.mark.asyncio
async def test_get_ticker_not_found(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Unknown asset yields an error payload instead of raising."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    result = await client.get_ticker("SOL")
    assert "error" in result


@pytest.mark.asyncio
async def test_get_orderbook(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """get_orderbook maps HL levels ([bids, asks] with px/sz) to price/size dicts."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json={
            "levels": [
                [{"px": "49990.0", "sz": "0.5"}, {"px": "49980.0", "sz": "1.0"}],
                [{"px": "50010.0", "sz": "0.3"}, {"px": "50020.0", "sz": "0.8"}],
            ]
        },
    )
    result = await client.get_orderbook("BTC", depth=2)
    assert result["asset"] == "BTC"
    assert len(result["bids"]) == 2
    assert len(result["asks"]) == 2
    assert result["bids"][0]["price"] == 49990.0


@pytest.mark.asyncio
async def test_get_positions(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Positive szi is reported as a long with parsed size/leverage."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=CLEARINGHOUSE_STATE,
    )
    positions = await client.get_positions()
    assert len(positions) == 1
    assert positions[0]["asset"] == "BTC"
    assert positions[0]["direction"] == "long"
    assert positions[0]["size"] == 0.1
    assert positions[0]["leverage"] == 10.0


@pytest.mark.asyncio
async def test_get_account_summary(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Summary combines perp equity (1500) and spot USDC (500) into total equity."""
    # get_account_summary calls /info twice (perp + spot)
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=CLEARINGHOUSE_STATE,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=SPOT_STATE,
    )
    result = await client.get_account_summary()
    assert result["perps_equity"] == 1500.0
    assert result["spot_usdc"] == 500.0
    assert result["equity"] == 2000.0


@pytest.mark.asyncio
async def test_get_trade_history(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Fills are normalized (coin → asset, px → price as float)."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {"coin": "BTC", "side": "B", "sz": "0.1", "px": "50000", "fee": "0.5", "time": 1000},
            {"coin": "ETH", "side": "A", "sz": "1.0", "px": "3000", "fee": "0.3", "time": 2000},
        ],
    )
    trades = await client.get_trade_history(limit=10)
    assert len(trades) == 2
    assert trades[0]["asset"] == "BTC"
    assert trades[0]["price"] == 50000.0


@pytest.mark.asyncio
async def test_get_open_orders(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Open orders keep the exchange oid and normalize coin → asset."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {
                "oid": 12345,
                "coin": "BTC",
                "side": "B",
                "sz": "0.05",
                "limitPx": "49000",
                "orderType": "Limit",
            }
        ],
    )
    orders = await client.get_open_orders()
    assert len(orders) == 1
    assert orders[0]["oid"] == 12345
    assert orders[0]["asset"] == "BTC"


@pytest.mark.asyncio
async def test_get_historical(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Candle rows (t/o/h/l/c/v) are parsed into numeric OHLCV candles."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {"t": 1000000, "o": "49000", "h": "51000", "l": "48500", "c": "50000", "v": "100"},
        ],
    )
    result = await client.get_historical("BTC", "2024-01-01", "2024-01-02", "1h")
    assert len(result["candles"]) == 1
    assert result["candles"][0]["close"] == 50000.0


@pytest.mark.asyncio
async def test_health_ok(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """health reports an ok status and reflects the testnet flag."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json={"universe": []},
    )
    result = await client.health()
    assert result["status"] in ("ok", "healthy")
    assert result["testnet"] is True


@pytest.mark.asyncio
async def test_place_order_sdk_unavailable(client: HyperliquidClient):
    """place_order raises RuntimeError when SDK is not available (mocked)."""
    import mcp_hyperliquid.client as mod

    # Flip the module flag off and clear the exchange handle; restore afterwards
    # so other tests in the session see the real state.
    original = mod._SDK_AVAILABLE
    mod._SDK_AVAILABLE = False
    client._exchange = None
    try:
        result = await client.place_order("BTC", "buy", 0.1, price=50000.0)
        # Should return error dict or raise RuntimeError
        assert "error" in result or result.get("status") == "error"
    except RuntimeError as exc:
        assert "not installed" in str(exc).lower() or "sdk" in str(exc).lower()
    finally:
        mod._SDK_AVAILABLE = original
|
||||
@@ -0,0 +1,217 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, MagicMock
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from mcp_hyperliquid.server import create_app
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
|
||||
@pytest.fixture
def mock_client():
    """MagicMock HyperliquidClient with canned async return values per tool."""
    canned = {
        "get_markets": [{"asset": "BTC", "mark_price": 50000}],
        "get_ticker": {"asset": "BTC", "mark_price": 50000},
        "get_orderbook": {"bids": [], "asks": []},
        "get_positions": [],
        "get_account_summary": {"equity": 1500, "perps_equity": 1000},
        "get_trade_history": [],
        "get_historical": {"candles": []},
        "get_open_orders": [],
        "get_funding_rate": {"asset": "BTC", "current_funding_rate": 0.0001},
        "get_indicators": {"rsi": 55.0},
        "place_order": {"order_id": "x", "status": "ok"},
        "cancel_order": {"order_id": "x", "status": "ok"},
        "set_stop_loss": {"order_id": "x", "status": "ok"},
        "set_take_profit": {"order_id": "x", "status": "ok"},
        "close_position": {"status": "ok", "asset": "BTC"},
    }
    fake = MagicMock()
    for method_name, value in canned.items():
        setattr(fake, method_name, AsyncMock(return_value=value))
    return fake
|
||||
|
||||
|
||||
@pytest.fixture
def http(mock_client):
    """TestClient over an app wired with a core ('ct') and an observer ('ot') token."""
    principals = {
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    }
    app = create_app(client=mock_client, token_store=TokenStore(tokens=principals))
    return TestClient(app)
|
||||
|
||||
|
||||
# --- Health ---


def test_health(http):
    """/health is public — no Authorization header required."""
    assert http.get("/health").status_code == 200


# --- Read tools: both core and observer allowed ---


def test_get_markets_core_ok(http):
    """Core token can call read tools."""
    r = http.post("/tools/get_markets", headers={"Authorization": "Bearer ct"}, json={})
    assert r.status_code == 200


def test_get_markets_observer_ok(http):
    """Observer token can call read tools."""
    r = http.post("/tools/get_markets", headers={"Authorization": "Bearer ot"}, json={})
    assert r.status_code == 200


def test_get_ticker_core_ok(http):
    """Ticker read returns the mocked payload for core."""
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "BTC"},
    )
    assert r.status_code == 200
    assert r.json()["mark_price"] == 50000


def test_get_ticker_observer_ok(http):
    """Ticker read is also allowed for observer."""
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC"},
    )
    assert r.status_code == 200


def test_get_ticker_no_auth_401(http):
    """Missing bearer token → 401."""
    r = http.post("/tools/get_ticker", json={"instrument": "BTC"})
    assert r.status_code == 401


def test_get_account_summary_observer_ok(http):
    """Account summary is readable by observer and returns the mocked equity."""
    r = http.post(
        "/tools/get_account_summary",
        headers={"Authorization": "Bearer ot"},
        json={},
    )
    assert r.status_code == 200
    assert r.json()["equity"] == 1500


def test_get_funding_rate_observer_ok(http):
    """Funding rate is a read tool, open to observer."""
    r = http.post(
        "/tools/get_funding_rate",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC"},
    )
    assert r.status_code == 200


def test_get_positions_no_auth_401(http):
    """Positions read without auth → 401."""
    r = http.post("/tools/get_positions", json={})
    assert r.status_code == 401


# --- Write tools: core only ---


def test_place_order_core_ok(http):
    """Core may place an order under the notional cap."""
    # CER-016: amount * price = 150 < cap 200
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "BTC", "side": "buy", "amount": 0.003, "price": 50000},
    )
    assert r.status_code == 200


def test_place_order_observer_forbidden(http):
    """Observer cannot place orders → 403."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC", "side": "buy", "amount": 0.001, "price": 50000},
    )
    assert r.status_code == 403


def test_place_order_notional_cap_enforced(http):
    """CER-016: HL reject amount*price > 200."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "ETH", "side": "buy", "amount": 0.1, "price": 3350},
    )
    assert r.status_code == 403
    assert r.json()["error"]["code"] == "HARD_PROHIBITION"


def test_place_order_leverage_cap_enforced_hl(http):
    """CER-016: leverage above the cap is rejected even for core."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "instrument": "BTC",
            "side": "buy",
            "amount": 0.001,
            "price": 50000,
            "leverage": 10,
        },
    )
    assert r.status_code == 403


def test_cancel_order_core_ok(http):
    """Core may cancel orders."""
    r = http.post(
        "/tools/cancel_order",
        headers={"Authorization": "Bearer ct"},
        json={"order_id": "123", "instrument": "BTC"},
    )
    assert r.status_code == 200


def test_cancel_order_observer_forbidden(http):
    """Observer cannot cancel orders → 403."""
    r = http.post(
        "/tools/cancel_order",
        headers={"Authorization": "Bearer ot"},
        json={"order_id": "123", "instrument": "BTC"},
    )
    assert r.status_code == 403


def test_set_stop_loss_core_ok(http):
    """Core may set a stop loss."""
    r = http.post(
        "/tools/set_stop_loss",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "BTC", "stop_price": 45000.0, "size": 0.1},
    )
    assert r.status_code == 200


def test_set_stop_loss_observer_forbidden(http):
    """Observer cannot set a stop loss → 403."""
    r = http.post(
        "/tools/set_stop_loss",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC", "stop_price": 45000.0, "size": 0.1},
    )
    assert r.status_code == 403


def test_set_take_profit_observer_forbidden(http):
    """Observer cannot set a take profit → 403."""
    r = http.post(
        "/tools/set_take_profit",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC", "tp_price": 55000.0, "size": 0.1},
    )
    assert r.status_code == 403


def test_close_position_core_ok(http):
    """Core may close a position."""
    r = http.post(
        "/tools/close_position",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "BTC"},
    )
    assert r.status_code == 200


def test_close_position_observer_forbidden(http):
    """Observer cannot close a position → 403."""
    r = http.post(
        "/tools/close_position",
        headers={"Authorization": "Bearer ot"},
        json={"instrument": "BTC"},
    )
    assert r.status_code == 403
|
||||
@@ -0,0 +1,27 @@
|
||||
[project]
|
||||
name = "mcp-macro"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_macro"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-macro = "mcp_macro.__main__:main"
|
||||
@@ -0,0 +1,39 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_macro.server import create_app
|
||||
|
||||
|
||||
configure_root_logging() # CER-P5-009
|
||||
|
||||
def main():
    """Load credentials and tokens from env-configured files, then serve the app."""
    with open(os.environ["MACRO_CREDENTIALS_FILE"]) as fh:
        credentials = json.load(fh)

    store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    application = create_app(
        fred_api_key=credentials.get("fred_api_key", ""),
        finnhub_api_key=credentials.get("finnhub_api_key", ""),
        token_store=store,
    )
    # CER-P5-009: log_config=None delegates logging to the root JSON logger.
    uvicorn.run(
        application,
        log_config=None,
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9013")),
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,516 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC, datetime, timedelta
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
# Upstream data-source endpoints.
FRED_BASE = "https://api.stlouisfed.org/fred/series/observations"
FINNHUB_CALENDAR = "https://finnhub.io/api/v1/calendar/economic"
COINGECKO_GLOBAL = "https://api.coingecko.com/api/v3/global"
COINGECKO_SIMPLE = "https://api.coingecko.com/api/v3/simple/price"
DERIBIT_DVOL = "https://www.deribit.com/api/v2/public/get_volatility_index_data"
YAHOO_CHART = "https://query1.finance.yahoo.com/v8/finance/chart/{symbol}"

# Friendly ticker → (Yahoo Finance symbol, display name).
ASSET_TICKER_MAP: dict[str, tuple[str, str]] = {
    "WTI": ("CL=F", "WTI Crude Oil"),
    "BRENT": ("BZ=F", "Brent Crude Oil"),
    "GOLD": ("GC=F", "Gold Futures"),
    "SILVER": ("SI=F", "Silver Futures"),
    "COPPER": ("HG=F", "Copper Futures"),
    "NATGAS": ("NG=F", "Natural Gas"),
    "DXY": ("DX-Y.NYB", "US Dollar Index"),
    "SPX": ("^GSPC", "S&P 500"),
    "NDX": ("^NDX", "Nasdaq 100"),
    "DJI": ("^DJI", "Dow Jones"),
    "RUT": ("^RUT", "Russell 2000"),
    "VIX": ("^VIX", "CBOE Volatility Index"),
    "US5Y": ("^FVX", "US 5-Year Treasury"),
    "US10Y": ("^TNX", "US 10-Year Treasury"),
    "US30Y": ("^TYX", "US 30-Year Treasury"),
    # NOTE(review): "^UST2YR" looks non-standard for Yahoo — confirm it resolves.
    "US2Y": ("^UST2YR", "US 2-Year Treasury"),
    "EURUSD": ("EURUSD=X", "EUR/USD"),
    "USDJPY": ("JPY=X", "USD/JPY"),
    "GBPUSD": ("GBPUSD=X", "GBP/USD"),
    "BTCUSD": ("BTC-USD", "Bitcoin/USD"),
    "ETHUSD": ("ETH-USD", "Ethereum/USD"),
    "ES": ("ES=F", "E-mini S&P 500 Futures"),
    "NQ": ("NQ=F", "E-mini Nasdaq 100 Futures"),
    "YM": ("YM=F", "E-mini Dow Futures"),
    "RTY": ("RTY=F", "E-mini Russell 2000 Futures"),
}

# Per-ticker price cache: key → {"data": result dict, "ts": time.monotonic()}.
_ASSET_CACHE: dict[str, dict] = {}
_ASSET_CACHE_TTL = 60.0  # seconds
|
||||
|
||||
async def _fetch_yahoo_meta(client: httpx.AsyncClient, symbol: str, range_: str = "10d") -> dict:
    """Fetch Yahoo chart meta + daily closes for *symbol*.

    Returns {"meta": ..., "closes": [...]} with None closes filtered out,
    or {} on any HTTP/parse failure (this helper never raises).
    """
    try:
        resp = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": range_},
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if resp.status_code != 200:
            return {}
        results = (resp.json().get("chart") or {}).get("result") or []
        if not results:
            return {}
        first = results[0]
        quote = ((first.get("indicators") or {}).get("quote") or [{}])[0]
        closes = [c for c in (quote.get("close") or []) if c is not None]
        return {"meta": first.get("meta") or {}, "closes": closes}
    except Exception:
        return {}
|
||||
|
||||
|
||||
async def fetch_asset_price(ticker: str) -> dict[str, Any]:
    """Return current price and 24h/7d % change for a mapped macro ticker.

    Resolves the friendly ticker via ASSET_TICKER_MAP, fetches from Yahoo,
    and caches the result for _ASSET_CACHE_TTL seconds. Unknown tickers
    produce an error dict without any network call.
    """
    import time

    key = ticker.upper()
    now = time.monotonic()
    hit = _ASSET_CACHE.get(key)
    if hit and (now - hit["ts"]) < _ASSET_CACHE_TTL:
        return hit["data"]

    if key not in ASSET_TICKER_MAP:
        return {"ticker": ticker, "error": f"unknown ticker {ticker}"}
    symbol, name = ASSET_TICKER_MAP[key]

    async with httpx.AsyncClient(timeout=10) as client:
        info = await _fetch_yahoo_meta(client, symbol, "10d")
    meta = info.get("meta") or {}
    closes = info.get("closes") or []
    price = meta.get("regularMarketPrice")
    prev_close = meta.get("previousClose")

    def _pct_change(base) -> float | None:
        # Percentage change of `price` vs `base`; None if not computable.
        try:
            return round((float(price) - float(base)) / float(base) * 100, 3)
        except Exception:
            return None

    change_24h_pct = _pct_change(prev_close) if price is not None and prev_close else None
    # 7d change uses the close 6 trading days back (10d range gives headroom).
    change_7d_pct = _pct_change(closes[-6]) if price is not None and len(closes) >= 6 else None

    out = {
        "ticker": key,
        "name": name,
        "price": float(price) if price is not None else None,
        "change_24h_pct": change_24h_pct,
        "change_7d_pct": change_7d_pct,
        "source": f"yfinance:{symbol}",
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _ASSET_CACHE[key] = {"data": out, "ts": now}
    return out
|
||||
|
||||
|
||||
_TREASURY_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
|
||||
_TREASURY_TTL = 300.0
|
||||
|
||||
|
||||
async def fetch_treasury_yields() -> dict[str, Any]:
    """US treasury yields (2/5/10/30y) from Yahoo, with 2s10s spread and curve shape.

    Results are cached for _TREASURY_TTL seconds. Individual symbols that
    fail to resolve are reported as None without failing the whole call.
    """
    import time

    now = time.monotonic()
    if _TREASURY_CACHE["data"] and (now - _TREASURY_CACHE["ts"]) < _TREASURY_TTL:
        return _TREASURY_CACHE["data"]

    # NOTE(review): "^UST2YR" looks non-standard for Yahoo — confirm it resolves.
    symbols = [
        ("us2y", "^UST2YR"),
        ("us5y", "^FVX"),
        ("us10y", "^TNX"),
        ("us30y", "^TYX"),
    ]
    yields: dict[str, float | None] = {}
    async with httpx.AsyncClient(timeout=10) as client:
        for name, sym in symbols:
            meta = (await _fetch_yahoo_meta(client, sym, "5d")).get("meta") or {}
            px = meta.get("regularMarketPrice")
            yields[name] = None if px is None else float(px)

    us2y = yields.get("us2y")
    us10y = yields.get("us10y")
    spread = round(us10y - us2y, 3) if us10y is not None and us2y is not None else None
    if spread is None:
        shape = "unknown"
    elif spread > 0.25:
        shape = "normal"
    elif spread < -0.1:
        shape = "inverted"
    else:
        shape = "flat"

    out = {
        "yields": yields,
        "spread_2y10y": spread,
        "yield_curve_shape": shape,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _TREASURY_CACHE["data"] = out
    _TREASURY_CACHE["ts"] = now
    return out
|
||||
|
||||
|
||||
async def fetch_equity_futures() -> dict[str, Any]:
    """Fetch ES/NQ/YM/RTY futures with cash-session detection.

    Returns per-contract price and % change vs previous close, the current US
    cash-equity session label, and the next cash-open timestamp (13:30 UTC).
    """
    tickers = [("es", "ES=F"), ("nq", "NQ=F"), ("ym", "YM=F"), ("rty", "RTY=F")]
    now = datetime.now(UTC)
    weekday = now.weekday()  # 0=Mon
    hour_utc = now.hour
    # Cash session approximated as 13:00-20:00 UTC on weekdays.
    # NOTE(review): DST shifts the real ET session within UTC — confirm tolerance.
    cash_open = (weekday < 5) and (13 <= hour_utc < 20)
    if cash_open:
        session = "regular"
    elif weekday >= 5:
        session = "weekend"
    elif hour_utc < 13:
        session = "pre-market"
    else:
        session = "after-hours"

    out: dict[str, Any] = {}
    async with httpx.AsyncClient(timeout=10) as client:
        for key, sym in tickers:
            info = await _fetch_yahoo_meta(client, sym, "5d")
            meta = info.get("meta") or {}
            price = meta.get("regularMarketPrice")
            prev = meta.get("previousClose") or meta.get("chartPreviousClose")
            change_pct = None
            if price is not None and prev:
                try:
                    change_pct = round((float(price) - float(prev)) / float(prev) * 100, 3)
                except Exception:
                    change_pct = None
            out[key] = {
                "price": float(price) if price is not None else None,
                "change_pct": change_pct,
                "session": session,
            }

    # Next cash open: today 13:30 UTC when before a weekday open, otherwise
    # the next *weekday's* 13:30 UTC.
    open_today = now.replace(hour=13, minute=30, second=0, microsecond=0)
    if weekday < 5 and hour_utc < 13:
        next_open = open_today.isoformat()
    else:
        if weekday >= 5:
            days_ahead = 7 - weekday  # Sat/Sun → Monday
        elif weekday == 4:
            # FIX: Friday at/after the open previously fell through with +1 day,
            # yielding Saturday as "next open"; roll forward to Monday instead.
            days_ahead = 3
        else:
            days_ahead = 1  # Mon-Thu → tomorrow
        next_open = (open_today + timedelta(days=days_ahead)).isoformat()

    return {
        "futures": out,
        "session_status": {
            "cash_open": cash_open,
            "session": session,
            "next_open_utc": next_open,
        },
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
|
||||
|
||||
# Aggregate market-snapshot cache: {"data": result, "ts": time.monotonic()}.
_MARKET_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
_MARKET_CACHE_TTL = 120.0  # seconds
|
||||
|
||||
|
||||
async def _fetch_yahoo_price(client: httpx.AsyncClient, symbol: str) -> float | None:
    """Best-effort latest price for *symbol* via the Yahoo chart API.

    Any HTTP error, malformed payload, or missing field yields None.
    """
    try:
        response = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": "5d"},
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if response.status_code != 200:
            return None
        payload = response.json()
        chart_results = (payload.get("chart") or {}).get("result") or []
        if not chart_results:
            return None
        last_price = (chart_results[0].get("meta") or {}).get("regularMarketPrice")
        if last_price is None:
            return None
        return float(last_price)
    except Exception:
        return None
|
||||
|
||||
|
||||
async def _fetch_dvol_latest(client: httpx.AsyncClient, currency: str) -> float | None:
    """Latest daily Deribit DVOL close for *currency* over the past 7 days.

    Returns None on any request/parse failure or when no candles come back.
    """
    end_ms = int(datetime.now(UTC).timestamp() * 1000)
    window_ms = 7 * 24 * 3600 * 1000
    query = {
        "currency": currency,
        "start_timestamp": end_ms - window_ms,
        "end_timestamp": end_ms,
        "resolution": "1D",
    }
    try:
        response = await client.get(DERIBIT_DVOL, params=query)
        candles = (response.json().get("result") or {}).get("data") or []
        if not candles:
            return None
        # Candle layout: [ts, open, high, low, close] — take the last close.
        return float(candles[-1][4])
    except Exception:
        return None
|
||||
|
||||
|
||||
async def fetch_economic_indicators(
|
||||
fred_api_key: str = "",
|
||||
indicators: list[str] | None = None,
|
||||
) -> dict[str, Any]:
|
||||
series_map = {
|
||||
"fed_rate": "FEDFUNDS",
|
||||
"cpi": "CPIAUCSL",
|
||||
"unemployment": "UNRATE",
|
||||
"us10y_yield": "DGS10",
|
||||
}
|
||||
result: dict[str, Any] = {}
|
||||
if not fred_api_key:
|
||||
return {"indicators": result, "error": "No FRED API key configured"}
|
||||
async with httpx.AsyncClient(timeout=10) as client:
|
||||
for name, series_id in series_map.items():
|
||||
if indicators and name not in indicators:
|
||||
continue
|
||||
resp = await client.get(
|
||||
FRED_BASE,
|
||||
params={
|
||||
"series_id": series_id,
|
||||
"api_key": fred_api_key,
|
||||
"file_type": "json",
|
||||
"sort_order": "desc",
|
||||
"limit": 1,
|
||||
},
|
||||
)
|
||||
data = resp.json()
|
||||
obs = data.get("observations", [])
|
||||
result[name] = float(obs[0]["value"]) if obs else None
|
||||
result["updated_at"] = datetime.now(UTC).isoformat()
|
||||
return result
|
||||
|
||||
|
||||
# Currency code -> (country_code, display name). The Forex Factory feed keys
# events by currency, so this maps them to the country fields used in results.
CURRENCY_TO_COUNTRY = {
    "USD": ("US", "United States"),
    "EUR": ("EU", "Euro Area"),
    "JPY": ("JP", "Japan"),
    "GBP": ("UK", "United Kingdom"),
    "CAD": ("CA", "Canada"),
    "AUD": ("AU", "Australia"),
    "NZD": ("NZ", "New Zealand"),
    "CHF": ("CH", "Switzerland"),
    "CNY": ("CN", "China"),
}
|
||||
|
||||
# Lowercase substrings matched against event names by
# _market_impact_historical() to flag releases treated as high-impact.
_HIGH_IMPACT_EVENTS = (
    "fomc", "fed", "cpi", "nfp", "non-farm", "nonfarm", "ppi",
    "ecb", "boj", "boe", "gdp", "unemployment rate",
)
|
||||
|
||||
|
||||
def _market_impact_historical(name: str) -> str:
    """Label an event name: "high_vol_spike" if it contains any keyword
    from _HIGH_IMPACT_EVENTS (case-insensitive), else "normal"."""
    lowered = (name or "").lower()
    if any(keyword in lowered for keyword in _HIGH_IMPACT_EVENTS):
        return "high_vol_spike"
    return "normal"
|
||||
|
||||
|
||||
async def fetch_macro_calendar(
    finnhub_api_key: str = "",
    days_ahead: int = 7,
    country_filter: list[str] | None = None,
    importance_min: str | None = None,
    start: str | None = None,
    end: str | None = None,
) -> dict[str, Any]:
    """Fetch the economic calendar with country/importance/date-range filters.

    Sources, in order: the free Forex Factory weekly JSON feed, then Finnhub
    (only when `finnhub_api_key` is set and Forex Factory yielded no events).

    Args:
        finnhub_api_key: enables the Finnhub fallback when non-empty.
        days_ahead: default look-ahead window for the Finnhub query when no
            explicit `end` is given.
        country_filter: country codes (e.g. "US") to keep; None keeps all.
        importance_min: "low" | "medium" | "high"; events below it are dropped.
        start/end: ISO datetime or YYYY-MM-DD range bounds (inclusive).

    Returns:
        {"events": [...]} — possibly with "note" (no source reachable) or
        "error" (Finnhub-reported error) instead of events.
    """
    events: list[dict[str, Any]] = []

    importance_order = {"low": 0, "medium": 1, "high": 2}
    # Unknown importance_min strings fall back to level 0 (keep everything).
    min_level = importance_order.get(
        (importance_min or "").lower(), 0
    ) if importance_min else 0

    # Parse range bounds: full ISO first, then bare YYYY-MM-DD; both treated as UTC.
    start_dt: datetime | None = None
    end_dt: datetime | None = None
    if start:
        try:
            start_dt = datetime.fromisoformat(start).replace(tzinfo=UTC)
        except ValueError:
            start_dt = datetime.strptime(start, "%Y-%m-%d").replace(tzinfo=UTC)
    if end:
        try:
            end_dt = datetime.fromisoformat(end).replace(tzinfo=UTC)
        except ValueError:
            end_dt = datetime.strptime(end, "%Y-%m-%d").replace(tzinfo=UTC)

    country_filter_set = (
        {c.upper() for c in country_filter} if country_filter else None
    )

    # Try Forex Factory free feed first (best-effort: any failure falls through).
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get("https://nfs.faireconomy.media/ff_calendar_thisweek.json")
            if resp.status_code == 200:
                raw = resp.json()
                now = datetime.now(UTC)
                for e in raw:
                    date_str = e.get("date", "")
                    event_dt: datetime | None = None
                    try:
                        event_dt = datetime.fromisoformat(date_str.replace("Z", "+00:00"))
                        # Past events are skipped; unparseable dates are kept
                        # with event_dt=None.
                        if event_dt < now:
                            continue
                    except (ValueError, TypeError):
                        pass

                    # FF's "country" field is actually a currency code.
                    currency = (e.get("country", "") or "").upper()
                    country_code, country_name = CURRENCY_TO_COUNTRY.get(
                        currency, (currency or "", e.get("country", "") or "")
                    )

                    if country_filter_set and country_code not in country_filter_set:
                        continue

                    impact = (e.get("impact", "") or "").lower()
                    importance = (
                        "high" if impact == "high" else "medium" if impact == "medium" else "low"
                    )
                    if importance_order[importance] < min_level:
                        continue

                    # Date-range filters only apply when the date parsed.
                    if start_dt and event_dt and event_dt < start_dt:
                        continue
                    if end_dt and event_dt and event_dt > end_dt:
                        continue

                    name = e.get("title", "")
                    events.append(
                        {
                            "date": date_str,
                            "datetime_utc": event_dt.isoformat() if event_dt else date_str,
                            "name": name,
                            "event": name,
                            "country": country_name,
                            "country_code": country_code,
                            "importance": importance,
                            "forecast": e.get("forecast", ""),
                            "previous": e.get("previous", ""),
                            "actual": e.get("actual"),
                            "market_impact_historical": _market_impact_historical(name),
                        }
                    )
    except Exception:
        pass

    # Fallback to Finnhub if we have a key and no events
    if not events and finnhub_api_key:
        try:
            now = datetime.now(UTC)
            end_default = now + timedelta(days=days_ahead)
            async with httpx.AsyncClient(timeout=10) as client:
                resp = await client.get(
                    FINNHUB_CALENDAR,
                    params={
                        "from": (start_dt or now).strftime("%Y-%m-%d"),
                        "to": (end_dt or end_default).strftime("%Y-%m-%d"),
                        "token": finnhub_api_key,
                    },
                )
                data = resp.json()
                if isinstance(data, dict) and "error" in data:
                    return {"events": [], "error": data["error"]}
                raw = data if isinstance(data, list) else data.get("economicCalendar", [])
                for e in raw:
                    # Finnhub importance may be an int (1-3) or a string.
                    importance_raw = (
                        e.get("importance")
                        or e.get("impact")
                        or "medium"
                    )
                    if isinstance(importance_raw, int):
                        importance = (
                            "high" if importance_raw >= 3 else
                            "medium" if importance_raw >= 2 else
                            "low"
                        )
                    else:
                        importance = str(importance_raw).lower()
                        if importance not in ("low", "medium", "high"):
                            importance = "medium"
                    if importance_order[importance] < min_level:
                        continue
                    country_code = (e.get("country", "") or "").upper()
                    country_name = CURRENCY_TO_COUNTRY.get(
                        country_code, (country_code, country_code)
                    )[1]
                    if country_filter_set and country_code not in country_filter_set:
                        continue
                    name = e.get("event", "")
                    date_str = e.get("date", e.get("time", ""))
                    events.append({
                        "date": date_str,
                        "datetime_utc": date_str,
                        "name": name,
                        "event": name,
                        "country": country_name,
                        "country_code": country_code,
                        "importance": importance,
                        "forecast": e.get("forecast", ""),
                        "previous": e.get("previous", e.get("prev", "")),
                        "actual": e.get("actual"),
                        "market_impact_historical": _market_impact_historical(name),
                    })
        except Exception:
            pass

    if not events:
        return {"events": [], "note": "No calendar source available"}

    return {"events": events}
|
||||
|
||||
|
||||
async def fetch_market_overview() -> dict[str, Any]:
    """Aggregate a crypto + TradFi snapshot with a short in-process cache.

    Combines CoinGecko global/simple-price data, Deribit DVOL and Yahoo
    quotes for SPX / gold / VIX. Each upstream failure degrades its own
    fields to None/{} instead of failing the call.

    Returns:
        dict of snapshot fields plus "data_timestamp" (ISO-8601 UTC).
    """
    import asyncio
    import time

    now = time.monotonic()
    if _MARKET_CACHE["data"] is not None and (now - _MARKET_CACHE["ts"]) < _MARKET_CACHE_TTL:
        # NOTE: the cached dict is returned as-is; callers must not mutate it.
        return _MARKET_CACHE["data"]

    async with httpx.AsyncClient(timeout=10) as client:
        global_data: dict[str, Any] = {}
        prices: dict[str, Any] = {}
        try:
            global_resp = await client.get(COINGECKO_GLOBAL)
            global_data = global_resp.json().get("data", {}) or {}
        except Exception:
            global_data = {}
        try:
            price_resp = await client.get(
                COINGECKO_SIMPLE,
                params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
            )
            prices = price_resp.json() or {}
        except Exception:
            prices = {}
        # These five fetches are independent and each swallows its own
        # errors, so run them concurrently instead of serially.
        dvol_btc, dvol_eth, sp500, gold, vix = await asyncio.gather(
            _fetch_dvol_latest(client, "BTC"),
            _fetch_dvol_latest(client, "ETH"),
            _fetch_yahoo_price(client, "^GSPC"),
            _fetch_yahoo_price(client, "GC=F"),
            _fetch_yahoo_price(client, "^VIX"),
        )

    out = {
        "btc_dominance": global_data.get("market_cap_percentage", {}).get("btc"),
        "total_market_cap": global_data.get("total_market_cap", {}).get("usd"),
        "btc_price": prices.get("bitcoin", {}).get("usd"),
        "eth_price": prices.get("ethereum", {}).get("usd"),
        "sp500": sp500,
        "gold": gold,
        "vix": vix,
        "dvol_btc": dvol_btc,
        "dvol_eth": dvol_eth,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _MARKET_CACHE["data"] = out
    _MARKET_CACHE["ts"] = now
    return out
|
||||
@@ -0,0 +1,136 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel
|
||||
|
||||
from mcp_macro.fetchers import (
|
||||
fetch_asset_price,
|
||||
fetch_economic_indicators,
|
||||
fetch_equity_futures,
|
||||
fetch_macro_calendar,
|
||||
fetch_market_overview,
|
||||
fetch_treasury_yields,
|
||||
)
|
||||
|
||||
# --- Body models ---
|
||||
|
||||
class GetEconomicIndicatorsReq(BaseModel):
    """Request body for /tools/get_economic_indicators."""

    # Optional whitelist of indicator names (e.g. "fed_rate"); None = all.
    indicators: list[str] | None = None
|
||||
|
||||
|
||||
class GetMacroCalendarReq(BaseModel):
    """Request body for /tools/get_macro_calendar."""

    # Look-ahead window (days) when no explicit start/end is supplied.
    days: int = 7
    # Country codes to keep (e.g. ["US"]); None keeps all.
    country_filter: list[str] | None = None
    # Minimum importance level: "low" | "medium" | "high".
    importance_min: str | None = None
    # Optional range bounds, ISO datetime or YYYY-MM-DD.
    start: str | None = None
    end: str | None = None
|
||||
|
||||
|
||||
class GetMarketOverviewReq(BaseModel):
    """Empty request body for /tools/get_market_overview."""

    pass
|
||||
|
||||
|
||||
class GetAssetPriceReq(BaseModel):
    """Request body for /tools/get_asset_price."""

    # Asset identifier forwarded to fetch_asset_price.
    ticker: str
|
||||
|
||||
|
||||
class GetTreasuryYieldsReq(BaseModel):
    """Empty request body for /tools/get_treasury_yields."""

    pass
|
||||
|
||||
|
||||
class GetEquityFuturesReq(BaseModel):
    """Empty request body for /tools/get_equity_futures."""

    pass
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Raise HTTP 403 unless *principal* holds at least one enabled capability."""
    allowed: set[str] = {
        cap for cap, enabled in (("core", core), ("observer", observer)) if enabled
    }
    if principal.capabilities.isdisjoint(allowed):
        raise HTTPException(403, f"capability required: {allowed}")
|
||||
|
||||
|
||||
# --- App factory ---
|
||||
|
||||
def create_app(*, fred_api_key: str = "", finnhub_api_key: str = "", token_store: TokenStore) -> FastAPI:
    """Build the mcp-macro FastAPI app.

    Registers one POST /tools/* route per fetcher (all readable by both the
    "core" and "observer" capabilities) and mounts the /mcp bridge endpoint.

    Args:
        fred_api_key: forwarded to fetch_economic_indicators.
        finnhub_api_key: forwarded to fetch_macro_calendar.
        token_store: bearer-token -> Principal mapping used for auth.
    """
    app = build_app(name="mcp-macro", version="0.1.0", token_store=token_store)

    # Each route authenticates via require_principal, then enforces the
    # capability check before delegating to the corresponding fetcher.
    @app.post("/tools/get_economic_indicators", tags=["reads"])
    async def t_get_economic_indicators(
        body: GetEconomicIndicatorsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_economic_indicators(
            fred_api_key=fred_api_key, indicators=body.indicators
        )

    @app.post("/tools/get_macro_calendar", tags=["reads"])
    async def t_get_macro_calendar(
        body: GetMacroCalendarReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_macro_calendar(
            finnhub_api_key=finnhub_api_key,
            days_ahead=body.days,
            country_filter=body.country_filter,
            importance_min=body.importance_min,
            start=body.start,
            end=body.end,
        )

    @app.post("/tools/get_market_overview", tags=["reads"])
    async def t_get_market_overview(
        body: GetMarketOverviewReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_market_overview()

    @app.post("/tools/get_asset_price", tags=["reads"])
    async def t_get_asset_price(
        body: GetAssetPriceReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_asset_price(body.ticker)

    @app.post("/tools/get_treasury_yields", tags=["reads"])
    async def t_get_treasury_yields(
        body: GetTreasuryYieldsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_treasury_yields()

    @app.post("/tools/get_equity_futures", tags=["reads"])
    async def t_get_equity_futures(
        body: GetEquityFuturesReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_equity_futures()

    # ───── MCP endpoint (/mcp) — bridge to the /tools/* routes ─────
    # The bridge calls back into this same process over HTTP, hence localhost:PORT.
    port = int(os.environ.get("PORT", "9013"))
    mount_mcp_endpoint(
        app,
        name="cerbero-macro",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_economic_indicators", "description": "FRED economic indicators (Fed rate, CPI, ecc)."},
            {"name": "get_macro_calendar", "description": "Eventi macro con filtri country/importance/date range."},
            {"name": "get_market_overview", "description": "Snapshot overview mercato macro."},
            {"name": "get_asset_price", "description": "Prezzo cross-asset: WTI, DXY, SPX, VIX, yields, FX, ecc."},
            {"name": "get_treasury_yields", "description": "Curva US Treasury 2y/5y/10y/30y + shape detection."},
            {"name": "get_equity_futures", "description": "Futures ES/NQ/YM/RTY con session status."},
        ],
    )

    return app
|
||||
@@ -0,0 +1,185 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from datetime import UTC
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
import pytest_httpx
|
||||
from mcp_macro.fetchers import (
|
||||
fetch_economic_indicators,
|
||||
fetch_macro_calendar,
|
||||
fetch_market_overview,
|
||||
)
|
||||
|
||||
# --- fetch_economic_indicators ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_economic_indicators_no_key():
    """Without a FRED key the fetcher short-circuits with an error dict."""
    result = await fetch_economic_indicators(fred_api_key="")
    assert "error" in result
    assert result["error"] == "No FRED API key configured"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_economic_indicators_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """All four FRED series mocked to "5.25": every indicator parses to float."""
    for series_id in ("FEDFUNDS", "CPIAUCSL", "UNRATE", "DGS10"):
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "testkey",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": "5.25"}]},
        )
    result = await fetch_economic_indicators(fred_api_key="testkey")
    assert result["fed_rate"] == 5.25
    assert result["cpi"] == 5.25
    assert result["unemployment"] == 5.25
    assert result["us10y_yield"] == 5.25
    assert "updated_at" in result
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_economic_indicators_filter(httpx_mock: pytest_httpx.HTTPXMock):
    """The `indicators` filter fetches only the requested series."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.stlouisfed.org/fred/series/observations",
            params={
                "series_id": "FEDFUNDS",
                "api_key": "k",
                "file_type": "json",
                "sort_order": "desc",
                "limit": "1",
            },
        ),
        json={"observations": [{"value": "5.33"}]},
    )
    result = await fetch_economic_indicators(fred_api_key="k", indicators=["fed_rate"])
    assert "fed_rate" in result
    assert "cpi" not in result
|
||||
|
||||
|
||||
# --- fetch_macro_calendar ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_macro_calendar_forex_factory_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """A future high-impact Forex Factory event survives the default filters."""
    from datetime import datetime, timedelta

    # Must be in the future: past events are dropped by the fetcher.
    future = (datetime.now(UTC) + timedelta(days=1)).isoformat()
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        json=[
            {
                "date": future,
                "title": "CPI",
                "country": "US",
                "impact": "High",
                "forecast": "3.0%",
                "previous": "3.2%",
            }
        ],
    )
    result = await fetch_macro_calendar()
    assert "events" in result
    assert len(result["events"]) >= 1
    assert result["events"][0]["name"] == "CPI"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_macro_calendar_no_source(httpx_mock: pytest_httpx.HTTPXMock):
    """Feed down and no Finnhub key: the fetcher returns the explicit "note" dict."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )
    result = await fetch_macro_calendar(finnhub_api_key="")
    assert result == {"events": [], "note": "No calendar source available"}
|
||||
|
||||
|
||||
@pytest.mark.asyncio
@pytest.mark.httpx_mock(assert_all_responses_were_requested=False, assert_all_requests_were_expected=False)
async def test_macro_calendar_finnhub_fallback(httpx_mock: pytest_httpx.HTTPXMock):
    """When Forex Factory fails, events come from the Finnhub fallback."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )

    # Route-by-host callback: only the Finnhub URL succeeds.
    def dispatch(request: httpx.Request) -> httpx.Response:
        if "finnhub.io" in str(request.url):
            return httpx.Response(
                200,
                json=[{"date": "2024-01-15", "event": "FOMC", "importance": "high", "forecast": "", "prev": ""}],
            )
        return httpx.Response(500)

    httpx_mock.add_callback(dispatch)
    result = await fetch_macro_calendar(finnhub_api_key="fkey")
    assert "events" in result
    assert result["events"][0]["name"] == "FOMC"
|
||||
|
||||
|
||||
# --- fetch_market_overview ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_market_overview_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """With every upstream mocked, all overview fields are populated."""
    # Single local import (previously `import re as _re` appeared twice).
    import re as _re

    httpx_mock.add_response(
        url="https://api.coingecko.com/api/v3/global",
        json={
            "data": {
                "market_cap_percentage": {"btc": 52.3},
                "total_market_cap": {"usd": 2_000_000_000_000},
            }
        },
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.coingecko.com/api/v3/simple/price",
            params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
        ),
        json={"bitcoin": {"usd": 65000}, "ethereum": {"usd": 3500}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=BTC.*"
        ),
        json={"result": {"data": [[1, 50, 52, 49, 51.5]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=ETH.*"
        ),
        json={"result": {"data": [[1, 60, 62, 59, 61.2]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^GSPC.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 5830.12}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/GC[%=].*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 2412.5}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^VIX.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 18.3}}]}},
    )
    # Clear module cache to force fresh fetch
    from mcp_macro import fetchers as _f
    _f._MARKET_CACHE["data"] = None
    _f._MARKET_CACHE["ts"] = 0.0
    result = await fetch_market_overview()
    assert result["btc_dominance"] == 52.3
    assert result["btc_price"] == 65000
    assert result["eth_price"] == 3500
    assert result["total_market_cap"] == 2_000_000_000_000
    assert result["dvol_btc"] == 51.5
    assert result["dvol_eth"] == 61.2
    assert result["sp500"] == 5830.12
    assert result["gold"] == 2412.5
    assert result["vix"] == 18.3
    assert "data_timestamp" in result
|
||||
@@ -0,0 +1,127 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from mcp_macro.server import create_app
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
|
||||
@pytest.fixture
def http():
    """TestClient wired with a "core" token ("ct") and an "observer" token ("ot")."""
    store = TokenStore(
        tokens={
            "ct": Principal("core", {"core"}),
            "ot": Principal("observer", {"observer"}),
        }
    )
    app = create_app(fred_api_key="testfred", finnhub_api_key="testfinn", token_store=store)
    return TestClient(app)
|
||||
|
||||
|
||||
# --- Health ---
|
||||
|
||||
def test_health(http):
    """/health is reachable without authentication."""
    assert http.get("/health").status_code == 200
|
||||
|
||||
|
||||
# --- get_economic_indicators ---
|
||||
|
||||
def test_get_economic_indicators_core_ok(http):
    """Core token can call get_economic_indicators (fetcher mocked)."""
    with patch(
        "mcp_macro.server.fetch_economic_indicators",
        new=AsyncMock(return_value={"fed_rate": 5.25, "updated_at": "2024-01-01T00:00:00+00:00"}),
    ):
        r = http.post(
            "/tools/get_economic_indicators",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert r.status_code == 200
        assert r.json()["fed_rate"] == 5.25
|
||||
|
||||
|
||||
def test_get_economic_indicators_observer_ok(http):
    """Observer token is also allowed on get_economic_indicators."""
    with patch(
        "mcp_macro.server.fetch_economic_indicators",
        new=AsyncMock(return_value={"fed_rate": 5.25}),
    ):
        r = http.post(
            "/tools/get_economic_indicators",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
|
||||
|
||||
|
||||
def test_get_economic_indicators_no_auth_401(http):
    """Missing bearer token yields 401."""
    r = http.post("/tools/get_economic_indicators", json={})
    assert r.status_code == 401
|
||||
|
||||
|
||||
# --- get_macro_calendar ---
|
||||
|
||||
def test_get_macro_calendar_core_ok(http):
    """Core token can call get_macro_calendar (fetcher mocked)."""
    with patch(
        "mcp_macro.server.fetch_macro_calendar",
        new=AsyncMock(return_value={"events": []}),
    ):
        r = http.post(
            "/tools/get_macro_calendar",
            headers={"Authorization": "Bearer ct"},
            json={"days": 7},
        )
        assert r.status_code == 200
|
||||
|
||||
|
||||
def test_get_macro_calendar_observer_ok(http):
    """Observer token is also allowed; empty body uses model defaults."""
    with patch(
        "mcp_macro.server.fetch_macro_calendar",
        new=AsyncMock(return_value={"events": []}),
    ):
        r = http.post(
            "/tools/get_macro_calendar",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
|
||||
|
||||
|
||||
def test_get_macro_calendar_no_auth_401(http):
    """Missing bearer token yields 401."""
    r = http.post("/tools/get_macro_calendar", json={})
    assert r.status_code == 401
|
||||
|
||||
|
||||
# --- get_market_overview ---
|
||||
|
||||
def test_get_market_overview_core_ok(http):
    """Core token can call get_market_overview (fetcher mocked)."""
    with patch(
        "mcp_macro.server.fetch_market_overview",
        new=AsyncMock(return_value={"btc_dominance": 52.0, "btc_price": 65000}),
    ):
        r = http.post(
            "/tools/get_market_overview",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert r.status_code == 200
        assert r.json()["btc_price"] == 65000
|
||||
|
||||
|
||||
def test_get_market_overview_observer_ok(http):
    """Observer token is also allowed on get_market_overview."""
    with patch(
        "mcp_macro.server.fetch_market_overview",
        new=AsyncMock(return_value={"btc_dominance": 52.0}),
    ):
        r = http.post(
            "/tools/get_market_overview",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
|
||||
|
||||
|
||||
def test_get_market_overview_no_auth_401(http):
    """Missing bearer token yields 401."""
    r = http.post("/tools/get_market_overview", json={})
    assert r.status_code == 401
|
||||
@@ -0,0 +1,27 @@
|
||||
[project]
|
||||
name = "mcp-sentiment"
|
||||
version = "0.1.0"
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"option-mcp-common",
|
||||
"fastapi>=0.115",
|
||||
"uvicorn[standard]>=0.30",
|
||||
"httpx>=0.27",
|
||||
"pydantic>=2.6",
|
||||
]
|
||||
|
||||
[project.optional-dependencies]
|
||||
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
|
||||
|
||||
[build-system]
|
||||
requires = ["hatchling"]
|
||||
build-backend = "hatchling.build"
|
||||
|
||||
[tool.hatch.build.targets.wheel]
|
||||
packages = ["src/mcp_sentiment"]
|
||||
|
||||
[tool.uv.sources]
|
||||
option-mcp-common = { workspace = true }
|
||||
|
||||
[project.scripts]
|
||||
mcp-sentiment = "mcp_sentiment.__main__:main"
|
||||
@@ -0,0 +1,47 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import os
|
||||
|
||||
import uvicorn
|
||||
from option_mcp_common.auth import load_token_store_from_files
|
||||
|
||||
from option_mcp_common.logging import configure_root_logging
|
||||
|
||||
from mcp_sentiment.server import create_app
|
||||
|
||||
|
||||
def _load_cryptopanic_key() -> str:
|
||||
"""CER-002: preferisci file secret, fallback a env CRYPTOPANIC_API_KEY."""
|
||||
creds_file = os.environ.get("SENTIMENT_CREDENTIALS_FILE")
|
||||
if creds_file and os.path.exists(creds_file):
|
||||
try:
|
||||
with open(creds_file) as f:
|
||||
creds = json.load(f)
|
||||
key = (creds.get("cryptopanic_key") or "").strip()
|
||||
if key and key.lower() not in ("placeholder", "changeme", "none"):
|
||||
return key
|
||||
except (OSError, json.JSONDecodeError):
|
||||
pass
|
||||
return (os.environ.get("CRYPTOPANIC_API_KEY") or "").strip()
|
||||
|
||||
|
||||
configure_root_logging() # CER-P5-009
|
||||
|
||||
def main():
    """Entry point: load the Cryptopanic key and token store, then run uvicorn."""
    key = _load_cryptopanic_key()
    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(cryptopanic_key=key, token_store=token_store)
    uvicorn.run(
        app,
        log_config=None,  # CER-P5-009: defer to the root JSON logger
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9014")),
    )
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
@@ -0,0 +1,524 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import xml.etree.ElementTree as ET
|
||||
from typing import Any
|
||||
|
||||
import httpx
|
||||
|
||||
# Upstream endpoints for the sentiment/news fetchers in this module.
CRYPTOPANIC_URL = "https://cryptopanic.com/api/v1/posts/"  # used with an API key (see _fetch_cryptopanic_news)
ALTERNATIVE_ME_URL = "https://api.alternative.me/fng/"  # "fng" = Fear & Greed index
COINDESK_RSS = "https://www.coindesk.com/arc/outboundfeeds/rss/"
LUNARCRUSH_COIN_URL = "https://lunarcrush.com/api4/public/coins/{symbol}/v1"
CRYPTOCOMPARE_NEWS_URL = "https://min-api.cryptocompare.com/data/v2/news/"
MESSARI_NEWS_URL = "https://data.messari.io/api/v1/news"
|
||||
|
||||
|
||||
async def _fetch_coindesk_headlines(limit: int = 20) -> list[dict[str, Any]]:
    """Parse up to *limit* headlines from the CoinDesk RSS feed.

    Best-effort: any network or XML failure yields an empty list.
    """
    headlines: list[dict[str, Any]] = []
    try:
        async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
            response = await client.get(COINDESK_RSS)
            if response.status_code != 200:
                return headlines
            feed = ET.fromstring(response.text)
            headlines = [
                {
                    "title": node.findtext("title", ""),
                    "source": "CoinDesk",
                    "published_at": node.findtext("pubDate", ""),
                    "url": node.findtext("link", ""),
                }
                for node in feed.findall(".//item")[:limit]
            ]
    except Exception:
        pass
    return headlines
|
||||
|
||||
# (label, RSS URL) pairs polled for world/market headlines.
# NOTE(review): feeds.reuters.com has been retired upstream — confirm the
# Reuters entry still resolves or replace it with a live feed.
WORLD_NEWS_FEEDS = [
    ("Reuters Business", "https://feeds.reuters.com/reuters/businessNews"),
    ("CNBC Top News", "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114"),
    ("Bloomberg Markets", "https://feeds.bloomberg.com/markets/news.rss"),
    ("CoinDesk", "https://www.coindesk.com/arc/outboundfeeds/rss/"),
]
|
||||
|
||||
# Public funding rate endpoints (no auth required)
BINANCE_FUNDING_URL = "https://fapi.binance.com/fapi/v1/premiumIndex"
BYBIT_FUNDING_URL = "https://api.bybit.com/v5/market/tickers"
OKX_FUNDING_URL = "https://www.okx.com/api/v5/public/funding-rate"
# Binance open-interest history (also public).
BINANCE_OI_HIST_URL = "https://fapi.binance.com/futures/data/openInterestHist"
|
||||
|
||||
|
||||
async def _fetch_cryptocompare_news(limit: int = 20) -> list[dict[str, Any]]:
    """CER-017: CryptoCompare news free (no key needed).

    Returns up to *limit* normalized headline dicts; [] on any failure.
    """
    # Hoisted: previously `import datetime as _dt` ran inside the per-item loop.
    import datetime as _dt

    items: list[dict[str, Any]] = []
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(CRYPTOCOMPARE_NEWS_URL, params={"lang": "EN"})
            if resp.status_code != 200:
                return items
            data = resp.json()
            for r in (data.get("Data") or [])[:limit]:
                ts = r.get("published_on")
                # published_on is a unix timestamp; normalize to ISO-8601 UTC.
                try:
                    pub = _dt.datetime.fromtimestamp(int(ts), _dt.UTC).isoformat() if ts else ""
                except (TypeError, ValueError):
                    pub = ""
                items.append({
                    "title": r.get("title", ""),
                    "source": r.get("source", "CryptoCompare"),
                    "published_at": pub,
                    "url": r.get("url", ""),
                    "provider": "cryptocompare",
                })
    except Exception:
        pass
    return items
|
||||
|
||||
|
||||
async def _fetch_messari_news(limit: int = 20) -> list[dict[str, Any]]:
    """CER-017: Messari news free (no key needed for the basic feed).

    Best-effort: any failure yields an empty list.
    """
    headlines: list[dict[str, Any]] = []
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            response = await client.get(MESSARI_NEWS_URL)
            if response.status_code != 200:
                return headlines
            rows = response.json().get("data") or []
            for row in rows[:limit]:
                author_name = (row.get("author") or {}).get("name") or "Messari"
                headlines.append({
                    "title": row.get("title", ""),
                    "source": author_name,
                    "published_at": row.get("published_at", ""),
                    "url": row.get("url", ""),
                    "provider": "messari",
                })
    except Exception:
        pass
    return headlines
|
||||
|
||||
|
||||
def _normalize_title(t: str) -> str:
|
||||
"""Lowercase + strip non-alnum per dedup tra provider."""
|
||||
return "".join(ch for ch in t.lower() if ch.isalnum() or ch.isspace()).strip()
|
||||
|
||||
|
||||
async def fetch_crypto_news(api_key: str = "", limit: int = 20) -> dict[str, Any]:
    """CER-017: multi-source aggregator (CoinDesk + CryptoCompare + Messari) + dedup.

    If a Cryptopanic `api_key` is present (and not a placeholder), it is
    included as a fourth source. Returns headlines newest-first plus
    per-provider success/failure bookkeeping.
    """
    import asyncio

    # CoinDesk + CryptoCompare + Messari always (free, no key)
    tasks = [
        _fetch_coindesk_headlines(limit),
        _fetch_cryptocompare_news(limit),
        _fetch_messari_news(limit),
    ]
    include_cp = bool(api_key) and api_key.lower() not in ("placeholder", "none", "changeme")
    if include_cp:
        tasks.append(_fetch_cryptopanic_news(api_key, limit))

    results = await asyncio.gather(*tasks, return_exceptions=True)
    all_items: list[dict[str, Any]] = []
    providers_ok: list[str] = []
    providers_failed: list[str] = []
    # NOTE: this list must stay in the same order as `tasks` above — the
    # strict zip below pairs each provider name with its gather result.
    provider_names = ["coindesk", "cryptocompare", "messari"]
    if include_cp:
        provider_names.append("cryptopanic")
    for name, res in zip(provider_names, results, strict=True):
        # An exception OR an empty result both count as provider failure.
        if isinstance(res, Exception) or not res:
            providers_failed.append(name)
            continue
        providers_ok.append(name)
        for item in res:
            if "provider" not in item:
                item["provider"] = name
            all_items.append(item)

    # Dedup by normalized title — the first occurrence wins.
    seen: set[str] = set()
    deduped: list[dict[str, Any]] = []
    for h in all_items:
        key = _normalize_title(h.get("title", ""))
        if not key or key in seen:
            continue
        seen.add(key)
        deduped.append(h)

    # Sort by published_at DESC (ISO strings compare lexicographically; empty ones sink to the end).
    deduped.sort(key=lambda x: x.get("published_at") or "", reverse=True)

    return {
        "headlines": deduped[:limit],
        "sources": providers_ok,
        "sources_failed": providers_failed,
        "total_before_dedup": len(all_items),
        "total_after_dedup": len(deduped),
    }
|
||||
|
||||
|
||||
async def _fetch_cryptopanic_news(api_key: str, limit: int) -> list[dict[str, Any]]:
    """Query Cryptopanic as one of the aggregator sources; any failure yields []."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                CRYPTOPANIC_URL,
                params={"auth_token": api_key, "public": "true"},
            )
            if resp.status_code >= 400:
                return []
            data = resp.json()
    except Exception:
        return []
    headlines: list[dict[str, Any]] = []
    for post in (data.get("results") or [])[:limit]:
        headlines.append(
            {
                "title": post.get("title", ""),
                "source": (post.get("source") or {}).get("title", "Cryptopanic"),
                "published_at": post.get("published_at", ""),
                "url": post.get("url", ""),
                "provider": "cryptopanic",
            }
        )
    return headlines
|
||||
|
||||
|
||||
async def _fetch_lunarcrush(symbol: str, api_key: str) -> dict | None:
    """CER-P2-005: LunarCrush v4 social metrics; returns None on any failure."""
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                LUNARCRUSH_COIN_URL.format(symbol=symbol.upper()),
                headers={"Authorization": f"Bearer {api_key}"},
            )
            if resp.status_code != 200:
                return None
            payload = (resp.json() or {}).get("data") or {}
    except Exception:
        return None
    # Project out only the fields the sentiment endpoint consumes.
    wanted = (
        "galaxy_score",
        "alt_rank",
        "sentiment",  # 0-100 scale
        "social_volume_24h",
        "social_dominance",
    )
    return {key: payload.get(key) for key in wanted}
|
||||
|
||||
|
||||
def _fng_to_sentiment(value: int) -> float:
|
||||
"""Normalize fear&greed 0-100 to [-1, 1] proxy sentiment."""
|
||||
return round((value - 50) / 50.0, 3)
|
||||
|
||||
|
||||
async def fetch_social_sentiment(symbol: str = "BTC") -> dict[str, Any]:
    """CER-P2-005: provider chain LunarCrush + fear&greed proxy.

    If the LUNARCRUSH_API_KEY env var is present and the API responds, real
    social metrics are used. Otherwise a proxy derived from the fear&greed
    index populates twitter/reddit sentiment (flagged derived=True so the
    agent knows it is a proxy, with a diagnostic "note").
    """
    fng: dict[str, Any] = {}
    fng_value = 0
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            fng_resp = await client.get(ALTERNATIVE_ME_URL, params={"limit": 1})
            fng_data = fng_resp.json()
        fng_list = fng_data.get("data", [])
        fng = fng_list[0] if fng_list else {}
        # The API reports "value" as a string (e.g. "72"); tolerate junk
        # payloads instead of propagating ValueError to the endpoint.
        fng_value = int(fng.get("value", 0))
    except (TypeError, ValueError):
        fng_value = 0
    except Exception:
        # Network/JSON failure: degrade to neutral defaults, matching the
        # best-effort style of the other fetchers in this module.
        fng = {}
    proxy = _fng_to_sentiment(fng_value) if fng_value else 0.0

    result: dict[str, Any] = {
        "fear_greed_index": fng_value,
        "fear_greed_label": fng.get("value_classification", ""),
        "symbol": symbol.upper(),
        "social_volume": 0,
        "twitter_sentiment": 0.0,
        "reddit_sentiment": 0.0,
        "source": "fear_greed_only",
        "derived": True,
    }

    lc_key = os.environ.get("LUNARCRUSH_API_KEY", "").strip()
    if lc_key:
        lc = await _fetch_lunarcrush(symbol, lc_key)
        if lc is not None:
            # LunarCrush sentiment 0-100 → normalize to [-1, 1]
            lc_sent = lc.get("sentiment")
            norm = round((float(lc_sent) - 50) / 50.0, 3) if lc_sent is not None else None
            result.update({
                "twitter_sentiment": norm if norm is not None else proxy,
                "reddit_sentiment": norm if norm is not None else proxy,
                "social_volume": int(lc.get("social_volume_24h") or 0),
                "galaxy_score": lc.get("galaxy_score"),
                "alt_rank": lc.get("alt_rank"),
                "social_dominance": lc.get("social_dominance"),
                "source": "lunarcrush+fear_greed",
                "derived": False,
            })
            return result

    # Proxy-only path
    result["twitter_sentiment"] = proxy
    result["reddit_sentiment"] = proxy
    result["note"] = (
        "twitter/reddit derived from fear_greed_index; configure LUNARCRUSH_API_KEY "
        "for real social metrics"
    )
    return result
|
||||
|
||||
|
||||
async def fetch_funding_rates(asset: str = "BTC") -> dict[str, Any]:
    """Fetch perpetual funding rates from Binance, Bybit and OKX public APIs.

    Each exchange is queried best-effort: a failure on one is swallowed so the
    others still contribute. Returns {"asset": ..., "rates": [...]}; each rate
    entry carries exchange, asset, rate (float) and next_funding_time.
    """
    asset = asset.upper()
    usdt_symbol = f"{asset}USDT"
    okx_inst = f"{asset}-USDT-SWAP"
    rates: list[dict[str, Any]] = []

    async with httpx.AsyncClient(timeout=10) as client:
        # Binance — premiumIndex returns a single object for the symbol.
        try:
            resp = await client.get(BINANCE_FUNDING_URL, params={"symbol": usdt_symbol})
            if resp.status_code == 200:
                d = resp.json()
                rates.append(
                    {
                        "exchange": "binance",
                        "asset": asset,
                        "rate": float(d.get("lastFundingRate", 0)),
                        "next_funding_time": d.get("nextFundingTime", ""),
                    }
                )
        except Exception:
            pass

        # Bybit — v5 tickers wraps the payload in result.list.
        try:
            resp = await client.get(
                BYBIT_FUNDING_URL,
                params={"category": "linear", "symbol": usdt_symbol},
            )
            if resp.status_code == 200:
                items = resp.json().get("result", {}).get("list", [])
                if items:
                    d = items[0]
                    rates.append(
                        {
                            "exchange": "bybit",
                            "asset": asset,
                            "rate": float(d.get("fundingRate", 0)),
                            "next_funding_time": d.get("nextFundingTime", ""),
                        }
                    )
        except Exception:
            pass

        # OKX — funding-rate endpoint wraps the payload in "data".
        try:
            resp = await client.get(OKX_FUNDING_URL, params={"instId": okx_inst})
            if resp.status_code == 200:
                items = resp.json().get("data", [])
                if items:
                    d = items[0]
                    rates.append(
                        {
                            "exchange": "okx",
                            "asset": asset,
                            "rate": float(d.get("fundingRate", 0)),
                            "next_funding_time": d.get("nextFundingTime", ""),
                        }
                    )
        except Exception:
            pass

    return {"asset": asset, "rates": rates}
|
||||
|
||||
|
||||
async def fetch_cross_exchange_funding(assets: list[str] | None = None) -> dict[str, Any]:
    """Multi-asset funding-rate snapshot with spread and arbitrage detection.

    For each asset, queries Binance, Bybit, OKX and Hyperliquid best-effort
    (an exchange that fails stays None) and computes the max-min spread plus
    a rough outlier flag. Pairs whose annualized funding differential exceeds
    50% are listed under "arbitrage_opportunities".
    """
    from datetime import UTC, datetime as _dt

    assets = [a.upper() for a in (assets or ["BTC", "ETH", "SOL"])]
    snapshot: dict[str, dict[str, Any]] = {}
    async with httpx.AsyncClient(timeout=10) as client:
        for asset in assets:
            rates: dict[str, float | None] = {
                "binance": None,
                "bybit": None,
                "okx": None,
                "hyperliquid": None,
            }
            try:
                resp = await client.get(
                    BINANCE_FUNDING_URL, params={"symbol": f"{asset}USDT"}
                )
                if resp.status_code == 200:
                    rates["binance"] = float(resp.json().get("lastFundingRate", 0))
            except Exception:
                pass
            try:
                resp = await client.get(
                    BYBIT_FUNDING_URL,
                    params={"category": "linear", "symbol": f"{asset}USDT"},
                )
                if resp.status_code == 200:
                    items = resp.json().get("result", {}).get("list", [])
                    if items:
                        rates["bybit"] = float(items[0].get("fundingRate", 0))
            except Exception:
                pass
            try:
                resp = await client.get(
                    OKX_FUNDING_URL, params={"instId": f"{asset}-USDT-SWAP"}
                )
                if resp.status_code == 200:
                    items = resp.json().get("data", [])
                    if items:
                        rates["okx"] = float(items[0].get("fundingRate", 0))
            except Exception:
                pass
            # Hyperliquid: metaAndAssetCtxs returns [universe_meta, asset_ctxs]
            # aligned by index; match the asset by name in the universe list.
            try:
                resp = await client.post(
                    "https://api.hyperliquid.xyz/info",
                    json={"type": "metaAndAssetCtxs"},
                )
                if resp.status_code == 200:
                    data = resp.json()
                    universe = data[0].get("universe") or []
                    ctx_list = data[1] if len(data) > 1 else []
                    for meta, ctx in zip(universe, ctx_list, strict=False):
                        if meta.get("name", "").upper() == asset:
                            rates["hyperliquid"] = float(ctx.get("funding", 0))
                            break
            except Exception:
                pass

            present = [v for v in rates.values() if v is not None]
            spread_max_min = max(present) - min(present) if present else None
            anomaly = None
            if present and spread_max_min is not None:
                mean_r = sum(present) / len(present)
                # NOTE(review): heuristic — flags a venue whose rate sits more
                # than one full spread from the mean (2 * spread/2); the 1e-9
                # guards division semantics when the spread is zero. Confirm
                # the threshold is intentional.
                for name, v in rates.items():
                    if v is None:
                        continue
                    if abs(v - mean_r) > 2 * (spread_max_min / 2 or 1e-9):
                        anomaly = f"{name}_outlier"
                        break

            snapshot[asset] = {
                **rates,
                "spread_max_min": spread_max_min,
                "anomaly": anomaly,
            }

    # Arbitrage opportunities: long the lowest-funding venue, short the highest.
    arbs = []
    for asset, data in snapshot.items():
        values = [(k, v) for k, v in data.items() if k in ("binance", "bybit", "okx", "hyperliquid") and v is not None]
        if len(values) < 2:
            continue
        values.sort(key=lambda x: x[1])
        low_ex, low_v = values[0]
        high_ex, high_v = values[-1]
        diff = high_v - low_v
        # NOTE(review): annualization assumes hourly funding; Binance/Bybit/OKX
        # typically settle every 8h — confirm the intended convention.
        ann_pct = diff * 24 * 365 * 100  # hourly funding → annual pct
        if ann_pct > 50:
            arbs.append({
                "asset": asset,
                "pair": f"long_{low_ex}_short_{high_ex}",
                "funding_differential_ann": round(ann_pct, 2),
                "risk_adjusted": "acceptable" if ann_pct > 100 else "marginal",
            })

    return {
        "assets": assets,
        "snapshot": snapshot,
        "arbitrage_opportunities": arbs,
        "data_timestamp": _dt.now(UTC).isoformat(),
    }
|
||||
|
||||
|
||||
async def fetch_world_news() -> dict[str, Any]:
    """Fetch world financial news from free RSS feeds.

    Iterates WORLD_NEWS_FEEDS, keeping up to 5 items per feed; a feed that
    errors or returns non-200 is skipped. Summaries are stripped of HTML tags
    and truncated to 200 characters.
    """
    articles: list[dict[str, Any]] = []

    async with httpx.AsyncClient(timeout=10, follow_redirects=True) as client:
        for source_name, url in WORLD_NEWS_FEEDS:
            try:
                resp = await client.get(url)
                if resp.status_code != 200:
                    continue
                root = ET.fromstring(resp.text)
                for item in root.findall(".//item")[:5]:
                    title = item.findtext("title", "")
                    link = item.findtext("link", "")
                    pub_date = item.findtext("pubDate", "")
                    description = item.findtext("description", "")
                    # Strip embedded HTML markup from the summary text.
                    if "<" in description:
                        description = re.sub(r"<[^>]+>", "", description).strip()
                    articles.append(
                        {
                            "source": source_name,
                            "title": title,
                            "url": link,
                            "published": pub_date,
                            "summary": description[:200] if description else "",
                        }
                    )
            except Exception:
                # Best-effort: a broken feed (bad XML, timeout) is skipped.
                continue

    return {"articles": articles, "count": len(articles)}
|
||||
|
||||
|
||||
async def fetch_oi_history(asset: str = "BTC", period: str = "5m", limit: int = 288) -> dict[str, Any]:
    """Perpetual open-interest history from Binance futures (public endpoint).

    period: 5m|15m|30m|1h|2h|4h|6h|12h|1d (Binance API).
    limit: 1..500, default 288 = 24h at 5-minute resolution.
    On any fetch failure the result still comes back with points=[] and
    None deltas rather than raising.
    """
    asset = asset.upper()
    symbol = f"{asset}USDT"
    # Clamp to the range Binance accepts.
    limit = max(1, min(int(limit), 500))
    points: list[dict[str, Any]] = []
    try:
        async with httpx.AsyncClient(timeout=10) as client:
            resp = await client.get(
                BINANCE_OI_HIST_URL,
                params={"symbol": symbol, "period": period, "limit": limit},
            )
            if resp.status_code == 200:
                for row in resp.json() or []:
                    points.append(
                        {
                            "timestamp": int(row.get("timestamp", 0)),
                            "oi": float(row.get("sumOpenInterest", 0)),
                            "oi_value_usd": float(row.get("sumOpenInterestValue", 0)),
                        }
                    )
    except Exception:
        pass

    def _delta_pct(points: list[dict[str, Any]], minutes_back: int) -> float | None:
        # Percent OI change vs the newest point at least `minutes_back` old;
        # timestamps are epoch millis, hence the * 60 * 1000.
        if len(points) < 2:
            return None
        current = points[-1]
        cutoff_ts = current["timestamp"] - minutes_back * 60 * 1000
        past = next((p for p in reversed(points) if p["timestamp"] <= cutoff_ts), None)
        if past is None or past["oi"] == 0:
            return None
        return round(100.0 * (current["oi"] - past["oi"]) / past["oi"], 3)

    return {
        "asset": asset,
        "exchange": "binance",
        "symbol": symbol,
        "period": period,
        "points": points,
        "current_oi": points[-1]["oi"] if points else None,
        "current_oi_value_usd": points[-1]["oi_value_usd"] if points else None,
        "delta_pct_1h": _delta_pct(points, 60),
        "delta_pct_4h": _delta_pct(points, 240),
        "delta_pct_24h": _delta_pct(points, 1440),
        "data_points": len(points),
    }
|
||||
@@ -0,0 +1,134 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
|
||||
from fastapi import Depends, FastAPI, HTTPException
|
||||
from option_mcp_common.auth import Principal, TokenStore, require_principal
|
||||
from option_mcp_common.mcp_bridge import mount_mcp_endpoint
|
||||
from option_mcp_common.server import build_app
|
||||
from pydantic import BaseModel
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from mcp_sentiment.fetchers import (
|
||||
fetch_crypto_news,
|
||||
fetch_cross_exchange_funding,
|
||||
fetch_funding_rates,
|
||||
fetch_oi_history,
|
||||
fetch_social_sentiment,
|
||||
fetch_world_news,
|
||||
)
|
||||
|
||||
# --- Body models ---
|
||||
|
||||
class GetCryptoNewsReq(BaseModel):
    """Request body for /tools/get_crypto_news."""

    # Max number of headlines returned after dedup.
    limit: int = 20
|
||||
|
||||
|
||||
class GetSocialSentimentReq(BaseModel):
    """Request body for /tools/get_social_sentiment."""

    # Ticker symbol, e.g. "BTC"; uppercased downstream.
    symbol: str = "BTC"
|
||||
|
||||
|
||||
class GetFundingRatesReq(BaseModel):
    """Request body for /tools/get_funding_rates."""

    # Perp base asset, e.g. "BTC"; uppercased downstream.
    asset: str = "BTC"
|
||||
|
||||
|
||||
class GetWorldNewsReq(BaseModel):
    """Request body for /tools/get_world_news (no parameters)."""

    pass
|
||||
|
||||
|
||||
class GetCrossExchangeFundingReq(BaseModel):
    """Request body for /tools/get_cross_exchange_funding."""

    # Assets to snapshot; None → fetcher default (BTC/ETH/SOL).
    assets: list[str] | None = None
|
||||
|
||||
|
||||
class GetOiHistoryReq(BaseModel):
    """Request body for /tools/get_oi_history."""

    # Perp base asset, e.g. "BTC".
    asset: str = "BTC"
    # Binance OI-history candle period: 5m|15m|30m|1h|2h|4h|6h|12h|1d.
    period: str = "5m"
    # Number of points (clamped to 1..500 by the fetcher); 288 = 24h at 5m.
    limit: int = 288
|
||||
|
||||
|
||||
# --- ACL helper ---
|
||||
|
||||
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
|
||||
allowed: set[str] = set()
|
||||
if core:
|
||||
allowed.add("core")
|
||||
if observer:
|
||||
allowed.add("observer")
|
||||
if not (principal.capabilities & allowed):
|
||||
raise HTTPException(403, f"capability required: {allowed}")
|
||||
|
||||
|
||||
# --- App factory ---
|
||||
|
||||
def create_app(*, cryptopanic_key: str = "", token_store: TokenStore) -> FastAPI:
    """Build the mcp-sentiment FastAPI app.

    Registers one POST route per tool under /tools/* (bearer-token auth plus a
    core/observer capability check) and mounts the MCP bridge endpoint at /mcp.

    cryptopanic_key: optional Cryptopanic API key; empty or placeholder values
        are accepted but logged, and get_crypto_news runs key-less.
    token_store: maps bearer tokens to Principals (injected by the caller).
    """
    app = build_app(name="mcp-sentiment", version="0.1.0", token_store=token_store)

    # Warn early if the key is missing/placeholder — same value set that
    # fetch_crypto_news treats as "no key configured".
    if not cryptopanic_key or cryptopanic_key.lower() in ("placeholder", "none", "changeme"):
        logger.warning(
            "mcp-sentiment: cryptopanic_key mancante o placeholder — get_crypto_news "
            "ritornerà headlines=[] con note diagnostica"
        )

    @app.post("/tools/get_crypto_news", tags=["reads"])
    async def t_get_crypto_news(
        body: GetCryptoNewsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_crypto_news(api_key=cryptopanic_key, limit=body.limit)

    @app.post("/tools/get_social_sentiment", tags=["reads"])
    async def t_get_social_sentiment(
        body: GetSocialSentimentReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_social_sentiment(body.symbol)

    @app.post("/tools/get_funding_rates", tags=["reads"])
    async def t_get_funding_rates(
        body: GetFundingRatesReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_funding_rates(body.asset)

    @app.post("/tools/get_world_news", tags=["reads"])
    async def t_get_world_news(
        body: GetWorldNewsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_world_news()

    @app.post("/tools/get_cross_exchange_funding", tags=["reads"])
    async def t_get_cross_exchange_funding(
        body: GetCrossExchangeFundingReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cross_exchange_funding(body.assets)

    @app.post("/tools/get_oi_history", tags=["reads"])
    async def t_get_oi_history(
        body: GetOiHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_oi_history(body.asset, body.period, body.limit)

    # ───── MCP endpoint (/mcp) — bridges MCP tool calls to the /tools/* routes ─────
    port = int(os.environ.get("PORT", "9014"))
    mount_mcp_endpoint(
        app,
        name="cerbero-sentiment",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_crypto_news", "description": "News crypto da CryptoPanic."},
            {"name": "get_social_sentiment", "description": "Sentiment aggregato social."},
            {"name": "get_funding_rates", "description": "Funding rates aggregati."},
            {"name": "get_world_news", "description": "News macro/world."},
            {"name": "get_cross_exchange_funding", "description": "Funding multi-asset multi-exchange + arbitrage opportunities."},
            {"name": "get_oi_history", "description": "Open interest history perp (Binance) + delta_pct 1h/4h/24h."},
        ],
    )

    return app
|
||||
@@ -0,0 +1,320 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import httpx
|
||||
import pytest
|
||||
import pytest_httpx
|
||||
from mcp_sentiment.fetchers import (
|
||||
fetch_crypto_news,
|
||||
fetch_funding_rates,
|
||||
fetch_social_sentiment,
|
||||
fetch_world_news,
|
||||
)
|
||||
|
||||
# --- CER-017 multi-source news aggregator ---
|
||||
|
||||
# Minimal CoinDesk RSS fixture: two items, one of which ("Common headline")
# also appears in the CryptoCompare mock so dedup can be exercised.
_COINDESK_RSS = (
    '<?xml version="1.0"?><rss><channel>'
    "<item><title>ETH rally</title><link>https://coindesk.com/eth</link>"
    "<pubDate>2026-04-19</pubDate></item>"
    "<item><title>Common headline</title><link>https://coindesk.com/x</link>"
    "<pubDate>2026-04-18</pubDate></item>"
    "</channel></rss>"
)
|
||||
|
||||
|
||||
def _mock_three_providers(httpx_mock: pytest_httpx.HTTPXMock, *, cc_items=None, messari_items=None):
    """Register mock responses for the three always-on news providers.

    cc_items / messari_items override the default payloads. "Common headline"
    deliberately appears in both the CoinDesk RSS and the CryptoCompare
    defaults so dedup tests have a duplicate to collapse.
    """
    httpx_mock.add_response(url="https://www.coindesk.com/arc/outboundfeeds/rss/", text=_COINDESK_RSS)
    httpx_mock.add_response(
        url="https://min-api.cryptocompare.com/data/v2/news/?lang=EN",
        json={"Data": cc_items if cc_items is not None else [
            {"title": "BTC ATH", "source": "CryptoCompare", "published_on": 1761868800, "url": "https://x/1"},
            {"title": "Common headline", "source": "Reuters", "published_on": 1761782400, "url": "https://x/2"},
        ]},
    )
    httpx_mock.add_response(
        url="https://data.messari.io/api/v1/news",
        json={"data": messari_items if messari_items is not None else [
            {"title": "SOL rally", "author": {"name": "Messari"}, "published_at": "2026-04-19T10:00:00Z", "url": "https://x/3"},
        ]},
    )
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_aggregates_three_sources(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: CoinDesk + CryptoCompare + Messari are fetched in parallel."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    titles = {h["title"] for h in result["headlines"]}
    assert "ETH rally" in titles
    assert "BTC ATH" in titles
    assert "SOL rally" in titles
    assert set(result["sources"]) == {"coindesk", "cryptocompare", "messari"}
    assert result["sources_failed"] == []
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_dedup_by_title(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: the same title on 2 providers collapses to a single entry."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    common_count = sum(1 for h in result["headlines"] if h["title"].lower() == "common headline")
    assert common_count == 1
    assert result["total_before_dedup"] > result["total_after_dedup"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_partial_failure(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: one provider returning 500 → the others proceed; sources_failed reports it."""
    httpx_mock.add_response(url="https://www.coindesk.com/arc/outboundfeeds/rss/", text=_COINDESK_RSS)
    httpx_mock.add_response(
        url="https://min-api.cryptocompare.com/data/v2/news/?lang=EN",
        status_code=500,
    )
    httpx_mock.add_response(
        url="https://data.messari.io/api/v1/news",
        json={"data": [{"title": "OK Messari", "author": {"name": "M"}, "published_at": "2026-04-19T10:00:00Z", "url": "https://x"}]},
    )
    result = await fetch_crypto_news(limit=20)
    assert "cryptocompare" in result["sources_failed"]
    assert "coindesk" in result["sources"]
    assert "messari" in result["sources"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_sorted_desc_by_date(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: headlines come back ordered by published_at DESC."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    dates = [h.get("published_at") or "" for h in result["headlines"] if h.get("published_at")]
    assert dates == sorted(dates, reverse=True)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_with_cryptopanic_key(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: with a real api_key, Cryptopanic is included as a 4th source."""
    _mock_three_providers(httpx_mock)
    httpx_mock.add_response(
        url=httpx.URL("https://cryptopanic.com/api/v1/posts/", params={"auth_token": "k", "public": "true"}),
        json={"results": [{
            "title": "Cryptopanic exclusive",
            "source": {"title": "CP"},
            "published_at": "2026-04-20T00:00:00Z",
            "url": "https://x/cp",
        }]},
    )
    result = await fetch_crypto_news(api_key="k", limit=20)
    titles = {h["title"] for h in result["headlines"]}
    assert "Cryptopanic exclusive" in titles
    assert "cryptopanic" in result["sources"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_placeholder_key_skips_cryptopanic(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: a placeholder api_key means Cryptopanic is never called at all."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(api_key="placeholder", limit=20)
    assert "cryptopanic" not in result["sources"]
    assert "cryptopanic" not in result["sources_failed"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_crypto_news_provider_tracing(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: every headline carries a provider field for traceability."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    for h in result["headlines"]:
        assert h.get("provider") in {"coindesk", "cryptocompare", "messari"}
|
||||
|
||||
|
||||
# --- fetch_social_sentiment ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_social_sentiment_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Happy path: fear&greed value ("72") and label are parsed from the API."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.alternative.me/fng/",
            params={"limit": "1"},
        ),
        json={"data": [{"value": "72", "value_classification": "Greed"}]},
    )
    result = await fetch_social_sentiment()
    assert result["fear_greed_index"] == 72
    assert result["fear_greed_label"] == "Greed"
    assert "social_volume" in result
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_social_sentiment_empty_data(httpx_mock: pytest_httpx.HTTPXMock):
    """Empty fear&greed payload → neutral defaults, no crash."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.alternative.me/fng/",
            params={"limit": "1"},
        ),
        json={"data": []},
    )
    result = await fetch_social_sentiment()
    assert result["fear_greed_index"] == 0
    assert result["fear_greed_label"] == ""
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_social_sentiment_derives_proxy_from_fng(
    httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
    """CER-P2-005: without LUNARCRUSH_API_KEY, twitter/reddit derive from F&G."""
    monkeypatch.delenv("LUNARCRUSH_API_KEY", raising=False)
    httpx_mock.add_response(
        url=httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"}),
        json={"data": [{"value": "25", "value_classification": "Extreme Fear"}]},
    )
    result = await fetch_social_sentiment()
    # F&G 25 → (25-50)/50 = -0.5 proxy sentiment.
    assert result["twitter_sentiment"] == pytest.approx(-0.5)
    assert result["reddit_sentiment"] == pytest.approx(-0.5)
    assert result["derived"] is True
    assert result["source"] == "fear_greed_only"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_social_sentiment_uses_lunarcrush_when_key_present(
    httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
    """CER-P2-005: with LUNARCRUSH_API_KEY set, real social metrics are used."""
    monkeypatch.setenv("LUNARCRUSH_API_KEY", "test-key")
    httpx_mock.add_response(
        url=httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"}),
        json={"data": [{"value": "50", "value_classification": "Neutral"}]},
    )
    httpx_mock.add_response(
        url="https://lunarcrush.com/api4/public/coins/BTC/v1",
        json={"data": {
            "sentiment": 80,
            "galaxy_score": 75,
            "alt_rank": 3,
            "social_volume_24h": 12345,
            "social_dominance": 25.5,
        }},
    )
    result = await fetch_social_sentiment("BTC")
    # LunarCrush sentiment 80 → (80-50)/50 = 0.6 normalized.
    assert result["twitter_sentiment"] == pytest.approx(0.6)
    assert result["reddit_sentiment"] == pytest.approx(0.6)
    assert result["social_volume"] == 12345
    assert result["galaxy_score"] == 75
    assert result["derived"] is False
    assert "lunarcrush" in result["source"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_social_sentiment_lunarcrush_failure_fallback_to_proxy(
    httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
    """CER-P2-005: if LunarCrush fails (401), fall back to the F&G proxy — no crash."""
    monkeypatch.setenv("LUNARCRUSH_API_KEY", "broken-key")
    httpx_mock.add_response(
        url=httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"}),
        json={"data": [{"value": "75", "value_classification": "Greed"}]},
    )
    httpx_mock.add_response(
        url="https://lunarcrush.com/api4/public/coins/BTC/v1",
        status_code=401,
        json={"error": "unauthorized"},
    )
    result = await fetch_social_sentiment("BTC")
    assert result["twitter_sentiment"] == pytest.approx(0.5)
    assert result["derived"] is True
    assert result["source"] == "fear_greed_only"
|
||||
|
||||
|
||||
# --- fetch_funding_rates ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_funding_rates_all_exchanges(httpx_mock: pytest_httpx.HTTPXMock):
    """All three exchanges respond → one rate entry per exchange."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://fapi.binance.com/fapi/v1/premiumIndex",
            params={"symbol": "BTCUSDT"},
        ),
        json={"lastFundingRate": "0.0001", "nextFundingTime": 1700000000000},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.bybit.com/v5/market/tickers",
            params={"category": "linear", "symbol": "BTCUSDT"},
        ),
        json={"result": {"list": [{"fundingRate": "0.0002", "nextFundingTime": "1700000000000"}]}},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://www.okx.com/api/v5/public/funding-rate",
            params={"instId": "BTC-USDT-SWAP"},
        ),
        json={"data": [{"fundingRate": "0.00015", "nextFundingTime": "1700000000000"}]},
    )
    result = await fetch_funding_rates()
    assert "rates" in result
    exchanges = {r["exchange"] for r in result["rates"]}
    assert "binance" in exchanges
    assert "bybit" in exchanges
    assert "okx" in exchanges
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_funding_rates_partial_failure(httpx_mock: pytest_httpx.HTTPXMock):
    """If some exchanges fail, we still get results from others."""
    # Only Binance succeeds; Bybit and OKX return 500.
    httpx_mock.add_response(
        url=httpx.URL(
            "https://fapi.binance.com/fapi/v1/premiumIndex",
            params={"symbol": "BTCUSDT"},
        ),
        json={"lastFundingRate": "0.0001", "nextFundingTime": 1700000000000},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.bybit.com/v5/market/tickers",
            params={"category": "linear", "symbol": "BTCUSDT"},
        ),
        status_code=500,
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://www.okx.com/api/v5/public/funding-rate",
            params={"instId": "BTC-USDT-SWAP"},
        ),
        status_code=500,
    )
    result = await fetch_funding_rates()
    assert len(result["rates"]) == 1
    assert result["rates"][0]["exchange"] == "binance"
|
||||
|
||||
|
||||
# --- fetch_world_news ---
|
||||
|
||||
@pytest.mark.asyncio
async def test_world_news_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Every configured feed serves the same single-item RSS → 4 articles total."""
    rss_xml = """<?xml version="1.0"?>
<rss version="2.0"><channel>
<item><title>Markets rally</title><link>http://example.com/1</link><pubDate>Mon, 15 Jan 2024 10:00:00 +0000</pubDate><description>Stocks up</description></item>
</channel></rss>"""
    for _, url in [
        ("Reuters Business", "https://feeds.reuters.com/reuters/businessNews"),
        ("CNBC Top News", "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114"),
        ("Bloomberg Markets", "https://feeds.bloomberg.com/markets/news.rss"),
        ("CoinDesk", "https://www.coindesk.com/arc/outboundfeeds/rss/"),
    ]:
        httpx_mock.add_response(url=url, text=rss_xml)
    result = await fetch_world_news()
    assert result["count"] == 4
    assert result["articles"][0]["title"] == "Markets rally"
|
||||
|
||||
|
||||
@pytest.mark.asyncio
async def test_world_news_all_fail(httpx_mock: pytest_httpx.HTTPXMock):
    """All feeds return 503 → empty article list, no exception raised."""
    for _, url in [
        ("Reuters Business", "https://feeds.reuters.com/reuters/businessNews"),
        ("CNBC Top News", "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114"),
        ("Bloomberg Markets", "https://feeds.bloomberg.com/markets/news.rss"),
        ("CoinDesk", "https://www.coindesk.com/arc/outboundfeeds/rss/"),
    ]:
        httpx_mock.add_response(url=url, status_code=503)
    result = await fetch_world_news()
    assert result["articles"] == []
    assert result["count"] == 0
|
||||
@@ -0,0 +1,159 @@
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from fastapi.testclient import TestClient
|
||||
from mcp_sentiment.server import create_app
|
||||
from option_mcp_common.auth import Principal, TokenStore
|
||||
|
||||
|
||||
@pytest.fixture
def http():
    """TestClient for the sentiment app with two known bearer tokens.

    ``ct`` maps to a core principal, ``ot`` to an observer principal.
    """
    principals = {
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    }
    app = create_app(
        cryptopanic_key="testkey",
        token_store=TokenStore(tokens=principals),
    )
    return TestClient(app)
|
||||
|
||||
|
||||
# --- Health ---
|
||||
|
||||
def test_health(http):
    """Liveness probe responds 200 without any auth header."""
    response = http.get("/health")
    assert response.status_code == 200
|
||||
|
||||
|
||||
# --- get_crypto_news ---
|
||||
|
||||
def test_get_crypto_news_core_ok(http):
    """Core token may call get_crypto_news; upstream fetch is stubbed."""
    stub = AsyncMock(return_value={"headlines": []})
    with patch("mcp_sentiment.server.fetch_crypto_news", new=stub):
        resp = http.post(
            "/tools/get_crypto_news",
            headers={"Authorization": "Bearer ct"},
            json={"limit": 5},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_crypto_news_observer_ok(http):
    """Observer token may also call get_crypto_news."""
    stub = AsyncMock(return_value={"headlines": []})
    with patch("mcp_sentiment.server.fetch_crypto_news", new=stub):
        resp = http.post(
            "/tools/get_crypto_news",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_crypto_news_no_auth_401(http):
    """Missing bearer token is rejected with 401."""
    resp = http.post("/tools/get_crypto_news", json={})
    assert resp.status_code == 401
|
||||
|
||||
|
||||
# --- get_social_sentiment ---
|
||||
|
||||
def test_get_social_sentiment_core_ok(http):
    """Core token gets the stubbed sentiment payload passed through."""
    stub = AsyncMock(return_value={"fear_greed_index": 65, "fear_greed_label": "Greed"})
    with patch("mcp_sentiment.server.fetch_social_sentiment", new=stub):
        resp = http.post(
            "/tools/get_social_sentiment",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
    assert resp.status_code == 200
    assert resp.json()["fear_greed_index"] == 65
|
||||
|
||||
|
||||
def test_get_social_sentiment_observer_ok(http):
    """Observer token may also call get_social_sentiment."""
    stub = AsyncMock(return_value={"fear_greed_index": 65})
    with patch("mcp_sentiment.server.fetch_social_sentiment", new=stub):
        resp = http.post(
            "/tools/get_social_sentiment",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_social_sentiment_no_auth_401(http):
    """Missing bearer token is rejected with 401."""
    resp = http.post("/tools/get_social_sentiment", json={})
    assert resp.status_code == 401
|
||||
|
||||
|
||||
# --- get_funding_rates ---
|
||||
|
||||
def test_get_funding_rates_core_ok(http):
    """Core token may call get_funding_rates; upstream fetch is stubbed."""
    stub = AsyncMock(return_value={"rates": []})
    with patch("mcp_sentiment.server.fetch_funding_rates", new=stub):
        resp = http.post(
            "/tools/get_funding_rates",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_funding_rates_observer_ok(http):
    """Observer token may also call get_funding_rates."""
    stub = AsyncMock(return_value={"rates": []})
    with patch("mcp_sentiment.server.fetch_funding_rates", new=stub):
        resp = http.post(
            "/tools/get_funding_rates",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_funding_rates_no_auth_401(http):
    """Missing bearer token is rejected with 401."""
    resp = http.post("/tools/get_funding_rates", json={})
    assert resp.status_code == 401
|
||||
|
||||
|
||||
# --- get_world_news ---
|
||||
|
||||
def test_get_world_news_core_ok(http):
    """Core token may call get_world_news; upstream fetch is stubbed."""
    stub = AsyncMock(return_value={"articles": [], "count": 0})
    with patch("mcp_sentiment.server.fetch_world_news", new=stub):
        resp = http.post(
            "/tools/get_world_news",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_world_news_observer_ok(http):
    """Observer token may also call get_world_news."""
    stub = AsyncMock(return_value={"articles": [], "count": 0})
    with patch("mcp_sentiment.server.fetch_world_news", new=stub):
        resp = http.post(
            "/tools/get_world_news",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
    assert resp.status_code == 200
|
||||
|
||||
|
||||
def test_get_world_news_no_auth_401(http):
    """Missing bearer token is rejected with 401."""
    resp = http.post("/tools/get_world_news", json={})
    assert resp.status_code == 401
|
||||
Reference in New Issue
Block a user