chore(V2): rimuovi services/, gateway/, secrets/, docker/ (legacy V1)

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
AdrianoDev
2026-04-30 18:58:11 +02:00
parent 1c1b3e1570
commit 6d19165d9e
100 changed files with 0 additions and 15408 deletions
-12
View File
@@ -1,12 +0,0 @@
# Shared base image for all MCP services: python:3.11-slim + uv with the
# common workspace package (mcp-common) synced into /app/.venv.
FROM python:3.11-slim AS base
# Native toolchain for wheels with C extensions + curl; drop apt lists in
# the same layer to keep the image slim.
RUN apt-get update && apt-get install -y --no-install-recommends \
build-essential curl \
&& rm -rf /var/lib/apt/lists/*
# Pin uv to a known-good range; no pip cache baked into the image.
RUN pip install --no-cache-dir "uv>=0.5,<0.7"
WORKDIR /app
# Copy only the workspace manifests + common package first: keeps this
# expensive sync layer cacheable across service rebuilds.
COPY pyproject.toml uv.lock ./
COPY services/common ./services/common
# Production deps only (--no-dev), exactly as locked (--frozen).
RUN uv sync --frozen --no-dev --package mcp-common
# Put the synced venv on PATH so entrypoints resolve without activation.
ENV PATH="/app/.venv/bin:$PATH"
# Dev variant: same base plus the dev dependency group.
FROM base AS dev
RUN uv sync --frozen --package mcp-common
-25
View File
@@ -1,25 +0,0 @@
# Builder stage: reuse the shared cerbero-base image (uv + toolchain + mcp-common).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-alpaca ./services/mcp-alpaca
RUN uv sync --frozen --no-dev --package mcp-alpaca
# Runtime stage: plain slim image carrying only the synced venv + sources.
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-alpaca"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9020
EXPOSE 9020
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9020\")}/health', timeout=3).close()"
CMD ["mcp-alpaca"]
-25
View File
@@ -1,25 +0,0 @@
# Builder stage: reuse the shared cerbero-base image (uv + toolchain + mcp-common).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-bybit ./services/mcp-bybit
RUN uv sync --frozen --no-dev --package mcp-bybit
# Runtime stage: plain slim image carrying only the synced venv + sources.
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-bybit"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9019
EXPOSE 9019
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9019\")}/health', timeout=3).close()"
CMD ["mcp-bybit"]
-27
View File
@@ -1,27 +0,0 @@
# CER-P5-012 multi-stage slim: builder from cerbero-base (with uv + toolchain),
# runtime from python:3.11-slim (venv + source only).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-deribit ./services/mcp-deribit
RUN uv sync --frozen --no-dev --package mcp-deribit
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-deribit"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9011
EXPOSE 9011
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9011\")}/health', timeout=3).close()"
CMD ["mcp-deribit"]
-25
View File
@@ -1,25 +0,0 @@
# Builder stage: reuse the shared cerbero-base image (uv + toolchain + mcp-common).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-hyperliquid ./services/mcp-hyperliquid
RUN uv sync --frozen --no-dev --package mcp-hyperliquid
# Runtime stage: plain slim image carrying only the synced venv + sources.
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-hyperliquid"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9012
EXPOSE 9012
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9012\")}/health', timeout=3).close()"
CMD ["mcp-hyperliquid"]
-25
View File
@@ -1,25 +0,0 @@
# Builder stage: reuse the shared cerbero-base image (uv + toolchain + mcp-common).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-macro ./services/mcp-macro
RUN uv sync --frozen --no-dev --package mcp-macro
# Runtime stage: plain slim image carrying only the synced venv + sources.
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-macro"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9013
EXPOSE 9013
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9013\")}/health', timeout=3).close()"
CMD ["mcp-macro"]
-25
View File
@@ -1,25 +0,0 @@
# Builder stage: reuse the shared cerbero-base image (uv + toolchain + mcp-common).
ARG BASE_IMAGE=cerbero-base
ARG BASE_TAG=latest
FROM ${BASE_IMAGE}:${BASE_TAG} AS builder
# Add this service's package and sync its locked production deps.
COPY services/mcp-sentiment ./services/mcp-sentiment
RUN uv sync --frozen --no-dev --package mcp-sentiment
# Runtime stage: plain slim image carrying only the synced venv + sources.
FROM python:3.11-slim AS runtime
LABEL org.opencontainers.image.source="https://github.com/AdrianoDev/cerbero" \
cerbero.service="mcp-sentiment"
WORKDIR /app
COPY --from=builder /app /app
ENV PATH="/app/.venv/bin:$PATH"
# Run as an unprivileged user.
RUN useradd -m -u 1000 app
USER app
ENV HOST=0.0.0.0 PORT=9014
EXPOSE 9014
# Probe /health via stdlib urllib (no curl in the slim runtime image).
HEALTHCHECK --interval=30s --timeout=5s --retries=3 --start-period=15s \
CMD python -c "import os, urllib.request; urllib.request.urlopen(f'http://localhost:{os.environ.get(\"PORT\",\"9014\")}/health', timeout=3).close()"
CMD ["mcp-sentiment"]
-86
View File
@@ -1,86 +0,0 @@
# Cerbero MCP gateway: TLS termination, security headers, write-endpoint IP
# allowlist, per-IP rate limiting, and path-prefix routing to the services.
{
	admin off
	email {$ACME_EMAIL:adrianodalpastro@tielogic.com}
	auto_https {$AUTO_HTTPS:on}
	# Plugin mholt/caddy-ratelimit
	order rate_limit before basicauth
	# Trusted proxies: honor X-Forwarded-For when behind a reverse proxy
	# (e.g. Traefik). Default = private ranges only.
	servers {
		trusted_proxies static {$TRUSTED_PROXIES:private_ranges}
	}
}
{$LISTEN:cerbero-mcp.tielogic.xyz} {
	log {
		output stdout
		format json
	}
	# ───── Security headers ─────
	header {
		Strict-Transport-Security "max-age=31536000; includeSubDomains; preload"
		X-Content-Type-Options "nosniff"
		X-Frame-Options "DENY"
		Referrer-Policy "no-referrer"
		-Server
	}
	# ───── IP allowlist on write endpoints ─────
	# WRITE_ALLOWLIST: space-separated CIDRs (e.g. "1.2.3.4/32 5.6.7.0/24").
	# Fail-closed default when unset: loopback (v4/v6) + the 172.16.0.0/12
	# private range.
	@writes_blocked {
		path_regexp ^/mcp-[a-z]+/tools/(place_|cancel_|set_|close_|transfer_|amend_|switch_)
		not remote_ip {$WRITE_ALLOWLIST:127.0.0.1/32 ::1/128 172.16.0.0/12}
	}
	respond @writes_blocked "forbidden: source ip not in allowlist" 403
	# ───── Rate limit ─────
	# Reads: 60 req/min/IP, writes: 10 req/min/IP (sliding window).
	rate_limit {
		zone reads {
			match {
				not path_regexp ^/mcp-[a-z]+/tools/(place_|cancel_|set_|close_|transfer_|amend_|switch_)
			}
			key {remote_ip}
			events 60
			window 1m
		}
		zone writes {
			match {
				path_regexp ^/mcp-[a-z]+/tools/(place_|cancel_|set_|close_|transfer_|amend_|switch_)
			}
			key {remote_ip}
			events 10
			window 1m
		}
	}
	# ───── Reverse proxy ─────
	# handle_path strips the /mcp-<name> prefix before forwarding to the
	# service's internal port.
	handle_path /mcp-deribit/* {
		reverse_proxy mcp-deribit:9011
	}
	handle_path /mcp-bybit/* {
		reverse_proxy mcp-bybit:9019
	}
	handle_path /mcp-hyperliquid/* {
		reverse_proxy mcp-hyperliquid:9012
	}
	handle_path /mcp-alpaca/* {
		reverse_proxy mcp-alpaca:9020
	}
	handle_path /mcp-macro/* {
		reverse_proxy mcp-macro:9013
	}
	handle_path /mcp-sentiment/* {
		reverse_proxy mcp-sentiment:9014
	}
	# Static landing page (fallback for all unmatched paths)
	handle {
		root * /srv
		file_server
	}
}
-6
View File
@@ -1,6 +0,0 @@
# Custom Caddy build with the mholt/caddy-ratelimit plugin compiled in
# (required by the `rate_limit` directive in the Caddyfile).
FROM caddy:2.8-builder-alpine AS builder
RUN xcaddy build \
--with github.com/mholt/caddy-ratelimit
# Final image: official slim Caddy with the custom binary swapped in.
FROM caddy:2.8-alpine
COPY --from=builder /usr/bin/caddy /usr/bin/caddy
-97
View File
@@ -1,97 +0,0 @@
<!DOCTYPE html>
<html lang="it">
<head>
<meta charset="UTF-8">
<title>Cerbero — MCP gateway</title>
<link rel="stylesheet" href="/style.css">
</head>
<body>
<header>
<h1>Cerbero</h1>
<p>Sistema trading autonomo crypto, architettura MCP-only.</p>
</header>
<main>
<table id="services">
<thead>
<tr>
<th>Stato</th>
<th>Servizio</th>
<th>Porta int.</th>
<th>Descrizione</th>
<th>Link</th>
</tr>
</thead>
<tbody>
<tr data-path="/mcp-memory">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-memory</td>
<td>9015</td>
<td>Store L1/L2, system prompt base + dyn</td>
<td><a href="/mcp-memory/health">health</a> · <a href="/mcp-memory/docs">docs</a></td>
</tr>
<tr data-path="/mcp-scheduler">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-scheduler</td>
<td>9016</td>
<td>Recurring task + core agent runner</td>
<td><a href="/mcp-scheduler/health">health</a> · <a href="/mcp-scheduler/docs">docs</a></td>
</tr>
<tr data-path="/mcp-deribit">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-deribit</td>
<td>9011</td>
<td>Options testnet order/market</td>
<td><a href="/mcp-deribit/health">health</a> · <a href="/mcp-deribit/docs">docs</a></td>
</tr>
<tr data-path="/mcp-hyperliquid">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-hyperliquid</td>
<td>9012</td>
<td>Perp DEX testnet</td>
<td><a href="/mcp-hyperliquid/health">health</a> · <a href="/mcp-hyperliquid/docs">docs</a></td>
</tr>
<tr data-path="/mcp-macro">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-macro</td>
<td>9013</td>
<td>FRED indicators + Finnhub calendar</td>
<td><a href="/mcp-macro/health">health</a> · <a href="/mcp-macro/docs">docs</a></td>
</tr>
<tr data-path="/mcp-sentiment">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-sentiment</td>
<td>9014</td>
<td>CryptoPanic news feed</td>
<td><a href="/mcp-sentiment/health">health</a> · <a href="/mcp-sentiment/docs">docs</a></td>
</tr>
<tr data-path="/mcp-telegram">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-telegram</td>
<td>9017</td>
<td>Bot commands + notifiche operatore</td>
<td><a href="/mcp-telegram/health">health</a> · <a href="/mcp-telegram/docs">docs</a></td>
</tr>
<tr data-path="/mcp-portfolio">
<td><span class="status" aria-label="unknown"></span></td>
<td>mcp-portfolio</td>
<td>9018</td>
<td>Holdings + yfinance + UI htmx</td>
<td><a href="/mcp-portfolio/health">health</a> · <a href="/gui">gui</a> · <a href="/mcp-portfolio/docs">docs</a></td>
</tr>
</tbody>
</table>
<section style="margin-top: 2rem;">
<h2 style="color: var(--accent); margin-bottom: 0.5rem;">Console operativa</h2>
<p><a href="/console" style="font-size: 1.1rem;">/console</a> — run del core agent, eventi stdout/stderr, L1 live, trigger manuale.</p>
</section>
</main>
<footer>
<p>Status aggiornato ogni 5 s. Gateway Caddy su porta configurata via <code>GATEWAY_PORT</code>.</p>
</footer>
<script src="/status.js"></script>
</body>
</html>
-23
View File
@@ -1,23 +0,0 @@
// Live status dots for the landing-page service table.
// Each <tr data-path="/mcp-..."> row owns one .status dot updated below.
const rows = document.querySelectorAll("tr[data-path]");

// One polling pass: GET <data-path>/health for every row and color the dot.
// 2xx -> "ok", HTTP error -> "err", network failure -> "err"/"unreachable".
async function poll() {
  for (const row of rows) {
    const dot = row.querySelector(".status");
    try {
      // no-store so the dot reflects the service state right now.
      const r = await fetch(`${row.dataset.path}/health`, {
        method: "GET",
        cache: "no-store",
      });
      dot.classList.toggle("ok", r.ok);
      dot.classList.toggle("err", !r.ok);
      dot.setAttribute("aria-label", r.ok ? "ok" : "error");
    } catch {
      dot.classList.remove("ok");
      dot.classList.add("err");
      dot.setAttribute("aria-label", "unreachable");
    }
  }
}

// Kick off immediately, then refresh every 5 s (matches the footer note).
poll();
setInterval(poll, 5000);
-101
View File
@@ -1,101 +0,0 @@
:root {
--bg: #0f172a;
--fg: #e2e8f0;
--muted: #94a3b8;
--card: #1e293b;
--border: #334155;
--ok: #22c55e;
--err: #ef4444;
--unknown: #64748b;
--accent: #38bdf8;
}
* { box-sizing: border-box; }
body {
margin: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, sans-serif;
background: var(--bg);
color: var(--fg);
line-height: 1.5;
}
header, main, footer {
max-width: 960px;
margin: 0 auto;
padding: 1.5rem;
}
header h1 {
margin: 0 0 0.25rem;
color: var(--accent);
font-size: 2rem;
}
header p {
margin: 0;
color: var(--muted);
}
table {
width: 100%;
border-collapse: collapse;
background: var(--card);
border-radius: 8px;
overflow: hidden;
}
th, td {
padding: 0.75rem 1rem;
text-align: left;
border-bottom: 1px solid var(--border);
}
th {
background: #0f172a;
color: var(--muted);
font-weight: 600;
font-size: 0.85rem;
text-transform: uppercase;
letter-spacing: 0.05em;
}
tr:last-child td { border-bottom: none; }
td:nth-child(3) {
font-family: ui-monospace, "SF Mono", Menlo, monospace;
color: var(--muted);
}
a {
color: var(--accent);
text-decoration: none;
margin-right: 0.5rem;
}
a:hover { text-decoration: underline; }
.status {
display: inline-block;
width: 12px;
height: 12px;
border-radius: 50%;
background: var(--unknown);
transition: background 0.3s ease;
}
.status.ok { background: var(--ok); box-shadow: 0 0 8px var(--ok); }
.status.err { background: var(--err); box-shadow: 0 0 8px var(--err); }
footer {
color: var(--muted);
font-size: 0.85rem;
margin-top: 2rem;
}
code {
background: var(--border);
padding: 0.1rem 0.3rem;
border-radius: 3px;
font-size: 0.9em;
}
-23
View File
@@ -1,23 +0,0 @@
[project]
name = "mcp-common"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
"fastapi>=0.115",
"uvicorn[standard]>=0.30",
"mcp>=1.0",
"httpx>=0.27",
"pydantic>=2.6",
"pydantic-settings>=2.3",
"python-json-logger>=2.0",
]
[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30", "ruff>=0.5"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_common"]
@@ -1 +0,0 @@
__all__ = []
@@ -1,95 +0,0 @@
"""App factory comune per i servizi mcp-{exchange}.
Centralizza il boilerplate dei `__main__.py`:
- configure_root_logging (JSON)
- fail_fast_if_missing su env mandatory
- summarize env
- load creds JSON
- resolve_environment con default URLs
- load token store
- delega creazione client + app a callback per-servizio
- uvicorn.run
Ogni servizio invoca `run_exchange_main(spec)` con uno spec dichiarativo.
"""
from __future__ import annotations
import json
import os
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
import uvicorn
from mcp_common.auth import load_token_store_from_files
from mcp_common.env_validation import fail_fast_if_missing, require_env, summarize
from mcp_common.environment import (
EnvironmentInfo,
consistency_check,
resolve_environment,
)
from mcp_common.logging import configure_root_logging
@dataclass(frozen=True)
class ExchangeAppSpec:
    """Declarative boot spec for one mcp-{exchange} service.

    Consumed by `run_exchange_main`, which owns the shared boot sequence;
    the two callbacks delegate the service-specific parts.
    """

    exchange: str  # service identifier, e.g. "bybit", "alpaca"
    creds_env_var: str  # env var holding the path to the credentials JSON
    env_var: str  # testnet/paper override env var, e.g. "BYBIT_TESTNET", "ALPACA_PAPER"
    flag_key: str  # boolean field in the secret JSON ("testnet" or "paper")
    default_base_url_live: str  # canonical live endpoint, used when absent from creds
    default_base_url_testnet: str  # canonical testnet endpoint, used when absent from creds
    default_port: int  # uvicorn port when the PORT env var is unset
    build_client: Callable[[dict, EnvironmentInfo], Any]  # (creds, env_info) -> exchange client
    build_app: Callable[..., Any]  # builds the service app from client/token_store/creds/env_info
    extra_summarize_envs: tuple[str, ...] = ()  # additional env vars to include in the boot summary
def run_exchange_main(spec: ExchangeAppSpec) -> None:
    """Shared boot entry point for an mcp-{exchange} service.

    Sequence: configure JSON logging, fail fast on missing mandatory env,
    log an env summary, load the credentials JSON, resolve testnet/mainnet,
    run the mainnet safety check, build client + app via the spec
    callbacks, then hand off to uvicorn. Blocks until the server stops.
    """
    configure_root_logging()
    # Abort before touching anything else if the creds path is not set.
    fail_fast_if_missing([spec.creds_env_var])
    summarize([
        spec.creds_env_var,
        "CORE_TOKEN_FILE",
        "OBSERVER_TOKEN_FILE",
        "PORT",
        "HOST",
        spec.env_var,
        *spec.extra_summarize_envs,
    ])
    creds_file = require_env(spec.creds_env_var, f"{spec.exchange} credentials JSON path")
    with open(creds_file) as f:
        creds = json.load(f)
    env_info = resolve_environment(
        creds,
        env_var=spec.env_var,
        flag_key=spec.flag_key,
        exchange=spec.exchange,
        default_base_url_live=spec.default_base_url_live,
        default_base_url_testnet=spec.default_base_url_testnet,
    )
    # Safety: prevents accidental switches to mainnet without explicit
    # confirmation in the secret. Raises EnvironmentMismatchError -> boot
    # abort on mismatch.
    strict_mainnet = os.environ.get("STRICT_MAINNET", "true").lower() not in ("0", "false", "no")
    consistency_check(env_info, creds, strict_mainnet=strict_mainnet)
    client = spec.build_client(creds, env_info)
    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = spec.build_app(client=client, token_store=token_store, creds=creds, env_info=env_info)
    # log_config=None keeps uvicorn from replacing the JSON root logger.
    uvicorn.run(
        app,
        log_config=None,
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", str(spec.default_port))),
    )
-121
View File
@@ -1,121 +0,0 @@
"""Audit log strutturato per write endpoint MCP (place_order, cancel,
set_*, close_*, transfer_*). Usa un logger dedicato `mcp.audit` su stream
JSON.
Sink:
- stdout/stderr (sempre): tramite root JSON logger configurato da
`mcp_common.logging.configure_root_logging`.
- File JSONL persistente (opzionale): se env var `AUDIT_LOG_FILE` è
settata, aggiunge un `TimedRotatingFileHandler` che ruota a mezzanotte
con `AUDIT_LOG_BACKUP_DAYS` di retention (default 30). Una riga JSON
per record (formato `.jsonl`).
Per VPS produzione: setta `AUDIT_LOG_FILE=/var/log/cerbero-mcp/<service>.audit.jsonl`
con bind mount del volume `/var/log/cerbero-mcp` nel docker-compose.
Payload sensibile (api_key, secret) già filtrato dal SecretsFilter
globale; qui non si include creds.
"""
from __future__ import annotations
import logging
import os
from logging.handlers import TimedRotatingFileHandler
from typing import Any
from mcp_common.auth import Principal
from mcp_common.logging import SecretsFilter, get_json_logger
try:
from pythonjsonlogger.json import JsonFormatter as _JsonFormatter # noqa: N813
except ImportError:
from pythonjsonlogger.jsonlogger import JsonFormatter as _JsonFormatter # noqa: N813
_logger = get_json_logger("mcp.audit", level=logging.INFO)
_file_handler_attached = False
def _configure_audit_sink() -> None:
    """Attach a rotating FileHandler to `mcp.audit` when AUDIT_LOG_FILE is set.

    Idempotent: the first call from audit_write_op does the work, later
    calls are no-ops (tracked via the module-level flag).
    """
    global _file_handler_attached
    if _file_handler_attached:
        return
    file_path = os.environ.get("AUDIT_LOG_FILE", "").strip()
    if not file_path:
        # No file sink requested: mark configured so env is not re-checked.
        _file_handler_attached = True
        return
    backup_days = int(os.environ.get("AUDIT_LOG_BACKUP_DAYS", "30"))
    os.makedirs(os.path.dirname(file_path) or ".", exist_ok=True)
    # Rotate at UTC midnight, keep `backup_days` old files.
    handler = TimedRotatingFileHandler(
        file_path,
        when="midnight",
        interval=1,
        backupCount=backup_days,
        encoding="utf-8",
        utc=True,
    )
    handler.setFormatter(_JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s"))
    # Same secret-redaction filter used by the stdout JSON logger.
    handler.addFilter(SecretsFilter())
    _logger.addHandler(handler)
    _file_handler_attached = True
def audit_write_op(
    *,
    principal: Principal | None,
    action: str,
    exchange: str,
    target: str | None = None,
    payload: dict[str, Any] | None = None,
    result: dict[str, Any] | None = None,
    error: str | None = None,
) -> None:
    """Emit a structured audit log record per write operation.

    principal: who invoked the call (None if anonymous, but normally _check
        prevents reaching this point without a principal).
    action: tool name (e.g. "place_order", "cancel_order").
    exchange: service identifier (deribit, bybit, alpaca, hyperliquid).
    target: instrument/symbol/order_id acted upon.
    payload: non-sensitive input (qty, side, leverage, etc.).
    result: client output (order_id, status, etc.).
    error: error string when the operation failed.
    """
    _configure_audit_sink()
    record: dict[str, Any] = {
        "audit_event": "write_op",
        "action": action,
        "exchange": exchange,
        "principal": principal.name if principal else None,
        "target": target,
        "payload": payload or {},
    }
    if result is not None:
        # Keep only salient fields; full client payloads can be huge.
        record["result"] = _summarize_result(result)
    if error is not None:
        record["error"] = error
        # Failures are logged at ERROR so they surface in alerting.
        _logger.error("audit", extra=record)
    else:
        _logger.info("audit", extra=record)
def _summarize_result(result: dict[str, Any]) -> dict[str, Any]:
"""Estrae i campi rilevanti dal result (order_id, state, error code)
per evitare di loggare payload enormi.
"""
keys = (
"order_id", "order_link_id", "combo_instrument", "state", "status",
"code", "error", "stop_price", "tp_price", "transfer_id",
)
out: dict[str, Any] = {}
for k in keys:
if k in result:
out[k] = result[k]
if "orders" in result:
out["orders_count"] = len(result["orders"])
return out
-98
View File
@@ -1,98 +0,0 @@
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass, field
from functools import wraps
from fastapi import HTTPException, Request, status
@dataclass
class Principal:
    """An authenticated caller and its capability set."""

    name: str
    capabilities: set[str] = field(default_factory=set)


@dataclass
class TokenStore:
    """Maps bearer-token strings to their Principal."""

    tokens: dict[str, Principal]

    def get(self, token: str) -> Principal | None:
        """Return the principal for *token*, or None when unknown."""
        try:
            return self.tokens[token]
        except KeyError:
            return None
def require_principal(request: Request) -> Principal:
    """FastAPI dependency: resolve the caller from the Bearer token.

    Raises 401 when the Authorization header is missing or not a Bearer
    scheme, 403 when the token is not in the app's TokenStore.
    """
    auth = request.headers.get("Authorization", "")
    if not auth.startswith("Bearer "):
        raise HTTPException(status.HTTP_401_UNAUTHORIZED, "missing bearer token")
    token = auth[len("Bearer "):].strip()
    # Token store is expected on app.state.token_store — presumably set by
    # the service's app factory at boot; verify against build_app callers.
    store: TokenStore = request.app.state.token_store
    principal = store.get(token)
    if principal is None:
        raise HTTPException(status.HTTP_403_FORBIDDEN, "invalid token")
    return principal
def acl_requires(*, core: bool = False, observer: bool = False) -> Callable:
    """Decorator: require at least one matching capability.

    The wrapped endpoint must receive a ``Principal`` as the ``principal``
    keyword or positionally; otherwise (or on a capability mismatch) 403
    is raised. Works on both sync and async endpoints.

    Fixes over the previous version: the principal lookup + capability
    check was duplicated in both wrappers (now shared in ``_check``), and
    async_wrapper contained a dead ``await func(...) if _is_coro(func)
    else func(...)`` ternary — async_wrapper is only ever returned when
    ``_is_coro(func)`` is true.
    """
    allowed: set[str] = set()
    if core:
        allowed.add("core")
    if observer:
        allowed.add("observer")

    def _check(args: tuple, kwargs: dict) -> None:
        # Locate the principal: keyword first, then scan positionals.
        principal = kwargs.get("principal")
        if principal is None:
            for a in args:
                if isinstance(a, Principal):
                    principal = a
                    break
        if principal is None or not (principal.capabilities & allowed):
            raise HTTPException(
                status.HTTP_403_FORBIDDEN,
                f"capability required: {allowed}",
            )

    def decorator(func: Callable) -> Callable:
        if _is_coro(func):
            @wraps(func)
            async def async_wrapper(*args, **kwargs):
                _check(args, kwargs)
                return await func(*args, **kwargs)
            return async_wrapper

        @wraps(func)
        def sync_wrapper(*args, **kwargs):
            _check(args, kwargs)
            return func(*args, **kwargs)
        return sync_wrapper

    return decorator
def _is_coro(func: Callable) -> bool:
import asyncio
return asyncio.iscoroutinefunction(func)
def load_token_store_from_files(
    core_token_file: str | None,
    observer_token_file: str | None,
) -> TokenStore:
    """Build a TokenStore from on-disk token files (one token per file).

    Each provided file yields one principal whose single capability equals
    its role name ("core" / "observer"); missing paths are skipped.
    """
    mapping: dict[str, Principal] = {}
    for path, role in ((core_token_file, "core"), (observer_token_file, "observer")):
        if not path:
            continue
        with open(path) as fh:
            token = fh.read().strip()
        mapping[token] = Principal(name=role, capabilities={role})
    return TokenStore(tokens=mapping)
@@ -1,69 +0,0 @@
"""Env validation policy: fail-fast per mandatory, soft per optional.
Usage al boot di ogni mcp `__main__.py`:
from mcp_common.env_validation import require_env, optional_env, summarize
creds_file = require_env("CREDENTIALS_FILE", "deribit credentials JSON path")
host = optional_env("HOST", default="0.0.0.0")
summarize(["CREDENTIALS_FILE", "HOST", "PORT"])
"""
from __future__ import annotations
import logging
import os
import sys
logger = logging.getLogger(__name__)


class MissingEnvError(RuntimeError):
    """Mandatory env var absent or empty."""


def require_env(name: str, description: str = "") -> str:
    """Return the stripped value of mandatory env var *name*.

    Raises MissingEnvError (after logging) when the var is unset or blank.
    """
    value = (os.environ.get(name) or "").strip()
    if value:
        return value
    detail = f" ({description})" if description else ""
    msg = f"missing mandatory env var: {name}{detail}"
    logger.error(msg)
    raise MissingEnvError(msg)
def optional_env(name: str, *, default: str = "") -> str:
    """Return env var *name* stripped, falling back to *default*.

    Logs an INFO line only when a non-empty default is actually used.
    """
    value = (os.environ.get(name) or "").strip()
    if value:
        return value
    if default:
        logger.info("env %s not set, using default=%r", name, default)
    return default
def summarize(names: list[str]) -> None:
    """Log each env var's status; mask values whose NAME looks sensitive."""
    sensitive_tokens = ("SECRET", "KEY", "TOKEN", "PASSWORD", "CREDENTIAL", "WALLET")
    for name in names:
        value = os.environ.get(name)
        if value is None:
            logger.info("env[%s]: <unset>", name)
        elif any(tok in name.upper() for tok in sensitive_tokens):
            # Never log the value itself, only that it is set and its length.
            logger.info("env[%s]: <set, %d chars>", name, len(value))
        else:
            logger.info("env[%s]: %s", name, value)
def fail_fast_if_missing(names: list[str]) -> None:
    """Exit(2) immediately when any env var in *names* is unset or blank.

    Logs the full list of missing names and mirrors it to stderr so the
    failure is visible even without the logging pipeline.
    """
    missing = [n for n in names if not (os.environ.get(n) or "").strip()]
    if not missing:
        return
    logger.error("boot aborted: missing mandatory env vars: %s", missing)
    print(
        f"FATAL: missing mandatory env vars: {missing}",
        file=sys.stderr,
    )
    sys.exit(2)
@@ -1,138 +0,0 @@
"""Resolver di ambiente (testnet/mainnet) per MCP exchange.
Precedenza: env var > campo secret > default True (testnet).
Safety: `consistency_check` previene switch accidentali a mainnet senza
conferma esplicita nel secret JSON.
"""
from __future__ import annotations
import logging
import os
from dataclasses import dataclass
from typing import Literal
logger = logging.getLogger(__name__)
Environment = Literal["testnet", "mainnet"]
Source = Literal["env", "credentials", "default"]
TRUTHY = {"1", "true", "yes", "on"}
# Tokens in the base_url that indicate a testnet endpoint (case-insensitive).
TESTNET_URL_HINTS = ("test", "testnet", "paper")


class EnvironmentMismatchError(RuntimeError):
    """Boot abort: resolved environment does not match the secret's explicit confirmation."""


@dataclass(frozen=True)
class EnvironmentInfo:
    """Resolved runtime environment for one exchange service."""

    exchange: str
    environment: Environment
    source: Source
    env_value: str | None
    base_url: str


def resolve_environment(
    creds: dict,
    *,
    env_var: str,
    flag_key: str,
    exchange: str,
    default_base_url_live: str | None = None,
    default_base_url_testnet: str | None = None,
) -> EnvironmentInfo:
    """Resolve testnet/mainnet for an MCP exchange.

    Precedence: env var override > boolean flag in the secret JSON >
    default "testnet". Canonical default URLs are written into *creds*
    via setdefault when absent, then the URL matching the resolved
    environment is selected.

    creds: dict read from the secret JSON; may carry base_url_live /
        base_url_testnet overrides.
    env_var: name of the override env var (e.g. DERIBIT_TESTNET).
    flag_key: boolean key in the secret JSON (e.g. "testnet", or "paper"
        for alpaca).
    exchange: exchange name for logging/info.
    """
    raw = os.environ.get(env_var)
    if raw is not None:
        environment: Environment = (
            "testnet" if raw.strip().lower() in TRUTHY else "mainnet"
        )
        source: Source = "env"
    elif flag_key in creds:
        environment = "testnet" if bool(creds[flag_key]) else "mainnet"
        source = "credentials"
    else:
        environment, source = "testnet", "default"
    if default_base_url_live is not None:
        creds.setdefault("base_url_live", default_base_url_live)
    if default_base_url_testnet is not None:
        creds.setdefault("base_url_testnet", default_base_url_testnet)
    url_key = "base_url_testnet" if environment == "testnet" else "base_url_live"
    return EnvironmentInfo(
        exchange=exchange,
        environment=environment,
        source=source,
        env_value=raw,
        base_url=creds[url_key],
    )
def consistency_check(
    env_info: EnvironmentInfo,
    creds: dict,
    *,
    strict_mainnet: bool = True,
) -> list[str]:
    """Check the resolved environment against the secret JSON. Returns a
    list of warning strings; raises EnvironmentMismatchError on blocking
    mismatches.

    Rules:
    - If `creds["environment"]` is present and DIFFERENT from
      `env_info.environment`: raise (declared vs resolved mismatch).
    - If `env_info.environment == "mainnet"` and `creds.get("environment")
      != "mainnet"`: with `strict_mainnet=True` raise (explicit
      confirmation required); with `strict_mainnet=False` only warn.
    - If `env_info.base_url` contains a testnet token ("test", "testnet",
      "paper") while `env_info.environment == "mainnet"` (or vice versa):
      warn (URL/environment inconsistent).
    """
    warnings: list[str] = []
    declared = creds.get("environment")
    # Hard mismatch: the secret says one thing, the resolver another.
    if declared and declared != env_info.environment:
        raise EnvironmentMismatchError(
            f"{env_info.exchange}: secret declared environment={declared!r} "
            f"but resolver resolved environment={env_info.environment!r}"
        )
    # Mainnet requires explicit opt-in in the secret JSON.
    if env_info.environment == "mainnet" and declared != "mainnet":
        msg = (
            f"{env_info.exchange}: resolved mainnet without explicit confirmation "
            "in secret. Add `\"environment\": \"mainnet\"` to the credentials JSON."
        )
        if strict_mainnet:
            raise EnvironmentMismatchError(msg)
        warnings.append(msg)
    # Soft checks: URL and environment should agree on testnet hints.
    url_lower = (env_info.base_url or "").lower()
    has_test_hint = any(token in url_lower for token in TESTNET_URL_HINTS)
    if env_info.environment == "mainnet" and has_test_hint:
        warnings.append(
            f"{env_info.exchange}: environment=mainnet but base_url contains "
            f"testnet hint ({env_info.base_url!r})"
        )
    if env_info.environment == "testnet" and not has_test_hint and url_lower:
        warnings.append(
            f"{env_info.exchange}: environment=testnet but base_url does not "
            f"appear to be a testnet endpoint ({env_info.base_url!r})"
        )
    for w in warnings:
        logger.warning("environment consistency: %s", w)
    return warnings
-85
View File
@@ -1,85 +0,0 @@
"""HTTP client factory con retry/backoff su errori transient.
Wrap leggero attorno a httpx.AsyncClient: aggiunge AsyncHTTPTransport
con retries=N per gestire connection errors / DNS / refused. Per retry
su 5xx HTTP response usa `request_with_retry()` (decoratore separato).
Usage standard:
async with async_client(timeout=15) as http:
resp = await http.get(url)
Equivalente a httpx.AsyncClient(timeout=15) ma con retry transport su
errori di livello connessione.
"""
from __future__ import annotations
import asyncio
import logging
from collections.abc import Awaitable, Callable
from typing import Any, TypeVar
import httpx
logger = logging.getLogger(__name__)
T = TypeVar("T")
DEFAULT_RETRIES = 3
DEFAULT_TIMEOUT = 15.0
def async_client(
    *,
    timeout: float = DEFAULT_TIMEOUT,
    retries: int = DEFAULT_RETRIES,
    follow_redirects: bool = False,
    **kwargs: Any,
) -> httpx.AsyncClient:
    """httpx.AsyncClient with AsyncHTTPTransport(retries=N) by default.

    `retries` covers connection errors / refused / DNS — NOT 5xx HTTP
    responses. Extra kwargs are forwarded to httpx.AsyncClient unchanged.
    """
    transport = httpx.AsyncHTTPTransport(retries=retries)
    return httpx.AsyncClient(
        timeout=timeout,
        transport=transport,
        follow_redirects=follow_redirects,
        **kwargs,
    )
async def call_with_retry(
    fn: Callable[[], Awaitable[T]],
    *,
    max_attempts: int = 3,
    base_delay: float = 0.5,
    max_delay: float = 8.0,
    retry_on: tuple[type[BaseException], ...] = (httpx.TransportError, httpx.TimeoutException),
) -> T:
    """Generic async retry with exponential backoff.

    Retries `fn()` when it raises one of the exceptions in `retry_on`.
    The delay doubles per attempt (0.5, 1, 2, 4, ...) clipped to
    `max_delay`. The last exception is re-raised once `max_attempts` is
    exhausted.

    Usable on sync SDKs wrapped in asyncio.to_thread (pybit, alpaca):
        result = await call_with_retry(lambda: client._run(self._http.get_tickers, ...))

    Raises:
        ValueError: if max_attempts < 1 (previously this fell through the
            loop to a bare `assert`, which raised an opaque AssertionError
            and is stripped entirely under `python -O`).
    """
    if max_attempts < 1:
        raise ValueError(f"max_attempts must be >= 1, got {max_attempts}")
    delay = base_delay
    for attempt in range(1, max_attempts + 1):
        try:
            return await fn()
        except retry_on as e:
            if attempt == max_attempts:
                # Out of attempts: propagate the final failure as-is.
                raise
            logger.warning(
                "transient error, retrying (%d/%d) in %.1fs: %s",
                attempt, max_attempts, delay, type(e).__name__,
            )
            await asyncio.sleep(delay)
            delay = min(delay * 2, max_delay)
    raise RuntimeError("unreachable: loop always returns or raises")
@@ -1,416 +0,0 @@
from __future__ import annotations
import math
def sma(values: list[float], period: int) -> float | None:
    """Simple moving average over the trailing *period* values.

    Returns None when fewer than *period* values are available.
    """
    if len(values) < period:
        return None
    window = values[-period:]
    return sum(window) / period
def rsi(closes: list[float], period: int = 14) -> float | None:
    """Wilder's Relative Strength Index over *closes*.

    Returns None when fewer than period + 1 closes are available; 100.0
    when there are no losses at all in the smoothed window.
    """
    if len(closes) < period + 1:
        return None
    deltas = [curr - prev for prev, curr in zip(closes, closes[1:])]
    gains = [max(d, 0.0) for d in deltas]
    losses = [-min(d, 0.0) for d in deltas]
    # Seed with plain averages, then apply Wilder smoothing.
    avg_gain = sum(gains[:period]) / period
    avg_loss = sum(losses[:period]) / period
    for gain, loss in zip(gains[period:], losses[period:]):
        avg_gain = (avg_gain * (period - 1) + gain) / period
        avg_loss = (avg_loss * (period - 1) + loss) / period
    if avg_loss == 0:
        return 100.0
    return 100.0 - 100.0 / (1.0 + avg_gain / avg_loss)
def _ema_series(values: list[float], period: int) -> list[float]:
if len(values) < period:
return []
k = 2.0 / (period + 1)
seed = sum(values[:period]) / period
out = [seed]
for v in values[period:]:
out.append(out[-1] + k * (v - out[-1]))
return out
def macd(
closes: list[float],
fast: int = 12,
slow: int = 26,
signal: int = 9,
) -> dict[str, float | None]:
nothing: dict[str, float | None] = {"macd": None, "signal": None, "hist": None}
if len(closes) < slow + signal:
return nothing
ema_fast = _ema_series(closes, fast)
ema_slow = _ema_series(closes, slow)
offset = slow - fast
aligned_fast = ema_fast[offset:]
macd_line = [f - s for f, s in zip(aligned_fast, ema_slow, strict=False)]
if len(macd_line) < signal:
return nothing
signal_line = _ema_series(macd_line, signal)
if not signal_line:
return nothing
last_macd = macd_line[-1]
last_sig = signal_line[-1]
return {
"macd": last_macd,
"signal": last_sig,
"hist": last_macd - last_sig,
}
def atr(
highs: list[float],
lows: list[float],
closes: list[float],
period: int = 14,
) -> float | None:
if len(closes) < period + 1:
return None
trs: list[float] = []
for i in range(1, len(closes)):
tr = max(
highs[i] - lows[i],
abs(highs[i] - closes[i - 1]),
abs(lows[i] - closes[i - 1]),
)
trs.append(tr)
if len(trs) < period:
return None
avg = sum(trs[:period]) / period
for i in range(period, len(trs)):
avg = (avg * (period - 1) + trs[i]) / period
return avg
def adx(
highs: list[float],
lows: list[float],
closes: list[float],
period: int = 14,
) -> dict[str, float | None]:
nothing: dict[str, float | None] = {"adx": None, "+di": None, "-di": None}
if len(closes) < 2 * period + 1:
return nothing
trs: list[float] = []
plus_dms: list[float] = []
minus_dms: list[float] = []
for i in range(1, len(closes)):
tr = max(
highs[i] - lows[i],
abs(highs[i] - closes[i - 1]),
abs(lows[i] - closes[i - 1]),
)
up = highs[i] - highs[i - 1]
dn = lows[i - 1] - lows[i]
plus_dm = up if (up > dn and up > 0) else 0.0
minus_dm = dn if (dn > up and dn > 0) else 0.0
trs.append(tr)
plus_dms.append(plus_dm)
minus_dms.append(minus_dm)
atr_s = sum(trs[:period])
pdm_s = sum(plus_dms[:period])
mdm_s = sum(minus_dms[:period])
dxs: list[float] = []
pdi = mdi = 0.0
for i in range(period, len(trs)):
atr_s = atr_s - atr_s / period + trs[i]
pdm_s = pdm_s - pdm_s / period + plus_dms[i]
mdm_s = mdm_s - mdm_s / period + minus_dms[i]
pdi = 100.0 * pdm_s / atr_s if atr_s else 0.0
mdi = 100.0 * mdm_s / atr_s if atr_s else 0.0
s = pdi + mdi
dx = 100.0 * abs(pdi - mdi) / s if s else 0.0
dxs.append(dx)
if len(dxs) < period:
return nothing
adx_val = sum(dxs[:period]) / period
for i in range(period, len(dxs)):
adx_val = (adx_val * (period - 1) + dxs[i]) / period
return {"adx": adx_val, "+di": pdi, "-di": mdi}
# ───── Returns helper ─────
def _log_returns(closes: list[float]) -> list[float]:
out: list[float] = []
for i in range(1, len(closes)):
prev = closes[i - 1]
curr = closes[i]
if prev > 0 and curr > 0:
out.append(math.log(curr / prev))
return out
def _percentile(sorted_values: list[float], q: float) -> float:
if not sorted_values:
return 0.0
if len(sorted_values) == 1:
return sorted_values[0]
pos = q * (len(sorted_values) - 1)
lo = int(pos)
hi = min(lo + 1, len(sorted_values) - 1)
frac = pos - lo
return sorted_values[lo] + frac * (sorted_values[hi] - sorted_values[lo])
def _stddev(xs: list[float]) -> float:
if len(xs) < 2:
return 0.0
m = sum(xs) / len(xs)
var = sum((x - m) ** 2 for x in xs) / (len(xs) - 1)
return math.sqrt(var)
# ───── vol_cone ─────
def vol_cone(
closes: list[float],
windows: list[int] | None = None,
annualization: int = 252,
) -> dict[int, dict[str, float | None]]:
"""Realized vol cone: per ogni window restituisce vol corrente e percentili
storici (p10/p50/p90) di tutte le rolling windows del campione.
Annualizzata (default 252 trading days).
"""
windows = windows or [10, 20, 30, 60]
rets = _log_returns(closes)
out: dict[int, dict[str, float | None]] = {}
factor = math.sqrt(annualization)
for w in windows:
if len(rets) < w:
out[w] = {"current": None, "p10": None, "p50": None, "p90": None}
continue
rolling: list[float] = []
for i in range(w, len(rets) + 1):
window_rets = rets[i - w:i]
rolling.append(_stddev(window_rets) * factor)
rolling_sorted = sorted(rolling)
out[w] = {
"current": rolling[-1],
"p10": _percentile(rolling_sorted, 0.10),
"p50": _percentile(rolling_sorted, 0.50),
"p90": _percentile(rolling_sorted, 0.90),
}
return out
# ───── hurst_exponent ─────
def hurst_exponent(closes: list[float], min_lag: int = 2, max_lag: int = 100) -> float | None:
    """Hurst exponent via rescaled-range (R/S) analysis on log-prices.

    H ≈ 0.5 → random walk, > 0.5 → trending, < 0.5 → mean-reverting.
    Returns None when the sample is too short or the final regression is
    degenerate (fewer than 4 usable lags, or zero variance in log-lags).
    """
    if len(closes) < max(20, max_lag):
        return None
    log_prices = [math.log(c) for c in closes if c > 0]
    if len(log_prices) < max(20, max_lag):
        return None
    lag_cap = min(max_lag, len(log_prices) // 2)
    if lag_cap < min_lag + 1:
        return None
    xs: list[float] = []  # log(lag)
    ys: list[float] = []  # log(mean R/S at that lag)
    for lag in range(min_lag, lag_cap):
        # Split the series into len//lag non-overlapping segments and
        # compute R/S on each one.
        ratios: list[float] = []
        for seg_start in range(0, (len(log_prices) // lag) * lag, lag):
            segment = log_prices[seg_start:seg_start + lag]
            increments = [segment[k] - segment[k - 1] for k in range(1, len(segment))]
            if len(increments) < 2:
                continue
            center = sum(increments) / len(increments)
            # Cumulative sum of mean-adjusted increments.
            cumulative: list[float] = []
            running = 0.0
            for inc in increments:
                running += inc - center
                cumulative.append(running)
            spread = max(cumulative) - min(cumulative)
            scale = _stddev(increments)
            if scale > 0:
                ratios.append(spread / scale)
        if ratios:
            mean_ratio = sum(ratios) / len(ratios)
            if mean_ratio > 0:
                xs.append(math.log(lag))
                ys.append(math.log(mean_ratio))
    if len(xs) < 4:
        return None
    # OLS slope of log(R/S) on log(lag) is the Hurst exponent.
    n = len(xs)
    mean_x = sum(xs) / n
    mean_y = sum(ys) / n
    numerator = sum((xs[i] - mean_x) * (ys[i] - mean_y) for i in range(n))
    denominator = sum((xs[i] - mean_x) ** 2 for i in range(n))
    if denominator == 0:
        return None
    return numerator / denominator
# ───── half_life_mean_reversion ─────
def half_life_mean_reversion(closes: list[float]) -> float | None:
"""Half-life via OU AR(1) fit: y_t - y_{t-1} = a + b*y_{t-1} + eps.
Half-life = -ln(2)/ln(1+b). Se b>=0 → no mean reversion → None.
"""
if len(closes) < 30:
return None
y_lag = closes[:-1]
delta = [closes[i] - closes[i - 1] for i in range(1, len(closes))]
n = len(y_lag)
mx = sum(y_lag) / n
my = sum(delta) / n
num = sum((y_lag[i] - mx) * (delta[i] - my) for i in range(n))
den = sum((y_lag[i] - mx) ** 2 for i in range(n))
if den == 0:
return None
b = num / den
if b >= 0:
return None
one_plus_b = 1.0 + b
if one_plus_b <= 0:
return None
return -math.log(2.0) / math.log(one_plus_b)
# ───── garch11_forecast ─────
def garch11_forecast(
    closes: list[float],
    max_iter: int = 50,
) -> dict[str, float] | None:
    """One-step-ahead GARCH(1,1) sigma forecast via a simplified
    method-of-moments fit (no MLE): (alpha, beta) are picked on a small grid
    by minimizing the MSE of the sigma^2 recursion against squared returns.
    Good enough for ranking volatility regimes; not production-grade.

    Returns None with fewer than 50 usable log-returns or zero variance.
    `max_iter` is kept for interface compatibility; the grid fit does not
    iterate.
    """
    rets = _log_returns(closes)
    if len(rets) < 50:
        return None
    mu = sum(rets) / len(rets)
    demeaned = [r - mu for r in rets]
    sq_rets = [r * r for r in demeaned]
    # Sample variance doubles as the long-run variance target.
    long_run_var = sum(sq_rets) / len(sq_rets)
    if long_run_var <= 0:
        return None
    # Grid-search (alpha, beta) minimizing the one-step tracking MSE.
    best_mse, alpha, beta = 1e18, 0.05, 0.90
    for cand_a in [0.02, 0.05, 0.08, 0.10, 0.15]:
        for cand_b in [0.80, 0.85, 0.88, 0.90, 0.93]:
            if cand_a + cand_b >= 0.999:
                continue
            cand_omega = long_run_var * (1 - cand_a - cand_b)
            if cand_omega <= 0:
                continue
            variance = long_run_var
            mse = 0.0
            for squared in sq_rets[:-1]:
                variance = cand_omega + cand_a * squared + cand_b * variance
                mse += (variance - squared) ** 2
            if mse < best_mse:
                best_mse, alpha, beta = mse, cand_a, cand_b
    omega = long_run_var * (1 - alpha - beta)
    # Re-run the recursion with the winning parameters, then step once ahead.
    variance = long_run_var
    for squared in sq_rets:
        variance = omega + alpha * squared + beta * variance
    next_var = omega + alpha * sq_rets[-1] + beta * variance
    return {
        "sigma_next": math.sqrt(max(next_var, 0.0)),
        "alpha": alpha,
        "beta": beta,
        "omega": omega,
        "long_run_sigma": math.sqrt(long_run_var),
    }
# ───── autocorrelation ─────
def autocorrelation(values: list[float], max_lag: int = 10) -> dict[int, float]:
    """Sample ACF for lags 1..max_lag (white noise → ≈ 0; AR(1) phi → phi^k).

    Returns {} when the series is shorter than max_lag + 2, and an all-zero
    ACF when the series has zero variance.
    """
    if len(values) < max_lag + 2:
        return {}
    n = len(values)
    mean = sum(values) / n
    centered = [v - mean for v in values]
    variance = sum(c * c for c in centered) / n
    if variance == 0:
        return dict.fromkeys(range(1, max_lag + 1), 0.0)
    acf: dict[int, float] = {}
    for lag in range(1, max_lag + 1):
        autocov = sum(centered[i] * centered[i + lag] for i in range(n - lag)) / n
        acf[lag] = autocov / variance
    return acf
# ───── rolling_sharpe ─────
def rolling_sharpe(
    closes: list[float],
    window: int = 60,
    annualization: int = 252,
    risk_free: float = 0.0,
) -> dict[str, float] | None:
    """Annualized Sharpe and Sortino over the last `window` log-returns.

    `risk_free` is an annualized rate, converted to per-period before
    computing excess returns. Returns None with fewer than `window` returns.
    """
    rets = _log_returns(closes)
    if len(rets) < window:
        return None
    recent = rets[-window:]
    per_period_rf = risk_free / annualization
    excess = [r - per_period_rf for r in recent]
    avg = sum(excess) / len(excess)
    vol = _stddev(excess)
    ann_factor = math.sqrt(annualization)
    sharpe = (avg / vol) * ann_factor if vol > 0 else 0.0
    negatives = [e for e in excess if e < 0]
    if negatives:
        # Downside deviation keeps the full sample size in the denominator.
        downside_vol = math.sqrt(sum(d * d for d in negatives) / len(excess))
        sortino = (avg / downside_vol) * ann_factor if downside_vol > 0 else 0.0
    else:
        sortino = sharpe * 2  # no downside observations → flag as "very good"
    return {"sharpe": sharpe, "sortino": sortino, "mean_excess": avg, "stddev": vol}
# ───── var_cvar ─────
def var_cvar(returns: list[float], confidences: list[float] | None = None) -> dict[str, float]:
    """Historical VaR and CVaR (Expected Shortfall) at each confidence level.

    `returns` may have any periodicity. Both measures are reported as
    positive losses (e.g. var_95=0.03 → -3% at 95%). Returns {} with fewer
    than 30 observations.
    """
    confidences = confidences or [0.95, 0.99]
    if len(returns) < 30:
        return {}
    ranked = sorted(returns)
    result: dict[str, float] = {}
    for conf in confidences:
        label = int(round(conf * 100))
        var_level = -_percentile(ranked, 1.0 - conf)
        # CVaR is the mean of the tail at or beyond the VaR threshold;
        # fall back to VaR itself when the tail is empty.
        threshold = -var_level
        tail = [r for r in ranked if r <= threshold]
        es = -(sum(tail) / len(tail)) if tail else var_level
        result[f"var_{label}"] = var_level
        result[f"cvar_{label}"] = es
    return result
-81
View File
@@ -1,81 +0,0 @@
from __future__ import annotations
import logging
import os
import re
import sys
# pythonjsonlogger rinominato in .json; keep fallback per compat
try:
from pythonjsonlogger.json import JsonFormatter as _JsonFormatter # noqa: N813
except ImportError:
from pythonjsonlogger.jsonlogger import JsonFormatter as _JsonFormatter # noqa: N813
# (pattern, replacement) pairs applied in order to every log message.
SECRET_PATTERNS = [
    (re.compile(r"Bearer\s+[\w\-\._]+", re.IGNORECASE), "Bearer ***"),
    (re.compile(r'("api_key"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("password"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("private_key"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r'("client_secret"\s*:\s*")[^"]+(")'), r'\1***\2'),
    (re.compile(r"sk-[\w]{20,}"), "sk-***"),
]


class SecretsFilter(logging.Filter):
    """Logging filter that masks secrets (tokens, keys, passwords) in messages."""

    def filter(self, record: logging.LogRecord) -> bool:
        # Format once, scrub every pattern, then store the scrubbed text back
        # on the record with args cleared so it is not re-formatted downstream.
        text = record.getMessage()
        for regex, mask in SECRET_PATTERNS:
            text = regex.sub(mask, text)
        record.msg = text
        record.args = ()
        return True
def get_json_logger(name: str, level: int = logging.INFO) -> logging.Logger:
    """Return a named logger emitting JSON lines to stderr with secrets masked.

    Idempotent: a logger that already has handlers is returned untouched.
    Propagation is disabled so records are not duplicated by the root logger.
    """
    logger = logging.getLogger(name)
    if logger.handlers:
        # Already configured by a previous call — leave as-is.
        return logger
    logger.setLevel(level)
    stream = logging.StreamHandler(sys.stderr)
    stream.setFormatter(_JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s"))
    stream.addFilter(SecretsFilter())
    logger.addHandler(stream)
    logger.propagate = False
    return logger
def configure_root_logging(
    *,
    level: str | int | None = None,
    format_type: str | None = None,
) -> None:
    """CER-P5-009: configure the root logger with a JSON or text formatter.

    Env overrides:
      - LOG_LEVEL (default INFO)
      - LOG_FORMAT=json|text (default json — production-ready structured log)
    SecretsFilter is applied in both formats.
    """
    raw_level = level if level is not None else os.environ.get("LOG_LEVEL", "INFO")
    resolved_level = (
        logging.getLevelName(raw_level.upper()) if isinstance(raw_level, str) else raw_level
    )
    chosen_format = (format_type or os.environ.get("LOG_FORMAT") or "json").lower()
    root = logging.getLogger()
    # Drop any pre-existing handlers (basicConfig would otherwise leave
    # duplicates behind).
    for existing in list(root.handlers):
        root.removeHandler(existing)
    handler = logging.StreamHandler(sys.stderr)
    if chosen_format == "json":
        handler.setFormatter(
            _JsonFormatter("%(asctime)s %(name)s %(levelname)s %(message)s")
        )
    else:
        handler.setFormatter(
            logging.Formatter("%(asctime)s %(levelname)s %(name)s %(message)s")
        )
    handler.addFilter(SecretsFilter())
    root.addHandler(handler)
    root.setLevel(resolved_level)
@@ -1,239 +0,0 @@
"""Bridge MCP → endpoint REST esistenti.
Implementa manualmente JSON-RPC 2.0 MCP su `POST /mcp` (no SSE, risposta
diretta in body JSON). Supporta:
- initialize
- notifications/initialized
- tools/list
- tools/call
Claude Code config esempio:
{
"mcpServers": {
"cerbero-memory": {
"type": "http",
"url": "http://localhost:8080/mcp-memory/mcp",
"headers": {"Authorization": "Bearer <observer-token>"}
}
}
}
"""
from __future__ import annotations
import contextlib
from typing import Any
import httpx
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from mcp_common.auth import TokenStore
# MCP protocol revision implemented by this JSON-RPC bridge.
MCP_PROTOCOL_VERSION = "2024-11-05"
def _derive_input_schemas(app: FastAPI, tool_names: list[str]) -> dict[str, dict]:
    """Extract the Pydantic body JSON schema for each POST /tools/<name> route.

    Lazy (PEP 563) annotations are resolved via `typing.get_type_hints`.
    Returns {tool_name: json_schema}; routes without a resolvable Pydantic
    body model are skipped, so the caller falls back to a permissive schema.
    """
    import typing

    from pydantic import BaseModel

    wanted = set(tool_names)
    schemas: dict[str, dict] = {}
    for route in app.routes:
        route_path = getattr(route, "path", "")
        if not route_path.startswith("/tools/"):
            continue
        tool = route_path[len("/tools/"):]
        if tool not in wanted:
            continue
        endpoint = getattr(route, "endpoint", None)
        if endpoint is None:
            continue
        try:
            hints = typing.get_type_hints(endpoint)
        except Exception:
            # Unresolvable annotations: leave this tool to the fallback.
            continue
        for param_name, annotation in hints.items():
            if param_name == "return":
                continue
            if isinstance(annotation, type) and issubclass(annotation, BaseModel):
                with contextlib.suppress(Exception):
                    schemas[tool] = annotation.model_json_schema()
                break
    return schemas
def _make_proxy_handler(internal_base_url: str, tool_name: str, token: str):
    """Build an async handler proxying a tool call to the internal REST API.

    The returned coroutine POSTs `args` to {internal_base_url}/tools/{tool_name},
    forwarding the caller's bearer token (when present) so the existing REST
    ACLs still apply. Non-JSON upstream bodies are wrapped as {"raw": text}.

    Raises:
        RuntimeError: when the upstream responds with HTTP >= 400.
    """
    async def handler(args: dict | None) -> Any:
        async with httpx.AsyncClient(timeout=30.0) as client:
            resp = await client.post(
                f"{internal_base_url}/tools/{tool_name}",
                headers={"Authorization": f"Bearer {token}"} if token else {},
                json=args or {},
            )
            if resp.status_code >= 400:
                # Fix: the original message fused the status code and the
                # (truncated) body with no separator ("HTTP 404{...}").
                raise RuntimeError(
                    f"tool {tool_name} failed: HTTP {resp.status_code}: {resp.text[:500]}"
                )
            try:
                return resp.json()
            except Exception:
                return {"raw": resp.text}
    return handler
def mount_mcp_endpoint(
    app: FastAPI,
    *,
    name: str,
    version: str,
    token_store: TokenStore,
    internal_base_url: str,
    tools: list[dict],
) -> None:
    """Register an MCP JSON-RPC 2.0 endpoint on POST /mcp.

    Supported methods: initialize, notifications/initialized, tools/list,
    tools/call. Each tool is proxied to POST {internal_base_url}/tools/<name>
    with the MCP client's Bearer token (preserving the existing REST ACLs).

    Args:
        app: the service's FastAPI instance
        name: MCP server name
        version: service version
        token_store: the same store used by the REST tools
        internal_base_url: internal base URL (e.g. "http://localhost:9015")
        tools: list of {"name": str, "description": str, "input_schema"?: dict}
    """
    tools_by_name = {t["name"]: t for t in tools}
    # Auto-derive input schemas from FastAPI routes (Pydantic body models).
    # Lets the LLM know the names of the required parameters instead of
    # guessing them. An explicit `input_schema` on the tool wins over the
    # auto-derived one.
    derived_schemas = _derive_input_schemas(app, [t["name"] for t in tools])
    def _tool_defs() -> list[dict]:
        # Tool definitions in MCP wire format; a permissive object schema is
        # the fallback when neither an explicit nor a derived schema exists.
        defs = []
        for t in tools:
            schema = t.get("input_schema") or derived_schemas.get(t["name"]) or {
                "type": "object",
                "additionalProperties": True,
            }
            defs.append({
                "name": t["name"],
                "description": t.get("description", t["name"]),
                "inputSchema": schema,
            })
        return defs
    async def _handle_rpc(body: dict, token: str) -> dict | None:
        # Dispatch one JSON-RPC message; returning None means "no response
        # body" (JSON-RPC notification).
        rpc_id = body.get("id")
        method = body.get("method")
        params = body.get("params") or {}
        # Notification (no id) → no response
        if method == "notifications/initialized":
            return None
        if method == "initialize":
            return {
                "jsonrpc": "2.0",
                "id": rpc_id,
                "result": {
                    "protocolVersion": MCP_PROTOCOL_VERSION,
                    "capabilities": {"tools": {"listChanged": False}},
                    "serverInfo": {"name": name, "version": version},
                },
            }
        if method == "tools/list":
            return {
                "jsonrpc": "2.0",
                "id": rpc_id,
                "result": {"tools": _tool_defs()},
            }
        if method == "tools/call":
            tool_name = params.get("name", "")
            args = params.get("arguments") or {}
            if tool_name not in tools_by_name:
                # JSON-RPC -32601: method/tool not found.
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "error": {"code": -32601, "message": f"tool non trovato: {tool_name}"},
                }
            handler = _make_proxy_handler(internal_base_url, tool_name, token)
            try:
                result = await handler(args)
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "result": {
                        "content": [
                            {
                                "type": "text",
                                "text": _to_text(result),
                            }
                        ],
                        "isError": False,
                    },
                }
            except Exception as e:
                # Tool failures are reported as successful JSON-RPC responses
                # with isError=True, per the MCP tools/call convention.
                return {
                    "jsonrpc": "2.0",
                    "id": rpc_id,
                    "result": {
                        "content": [{"type": "text", "text": str(e)}],
                        "isError": True,
                    },
                }
        return {
            "jsonrpc": "2.0",
            "id": rpc_id,
            "error": {"code": -32601, "message": f"metodo non supportato: {method}"},
        }
    @app.post("/mcp")
    async def mcp_entry(request: Request):
        # Bearer auth first: 401 when the header is missing/malformed,
        # 403 when the token is unknown to the store.
        auth = request.headers.get("Authorization", "")
        if not auth.startswith("Bearer "):
            return JSONResponse({"error": "missing bearer token"}, status_code=401)
        token = auth[len("Bearer "):].strip()
        principal = token_store.get(token)
        if principal is None:
            return JSONResponse({"error": "invalid token"}, status_code=403)
        body = await request.json()
        # Batch support
        if isinstance(body, list):
            results = []
            for item in body:
                resp = await _handle_rpc(item, token)
                if resp is not None:
                    results.append(resp)
            return JSONResponse(results)
        resp = await _handle_rpc(body, token)
        if resp is None:
            # Notification (no id) → 204 no content
            return JSONResponse(None, status_code=204)
        return JSONResponse(resp)
def _to_text(value: Any) -> str:
import json
if isinstance(value, str):
return value
try:
return json.dumps(value, ensure_ascii=False, indent=2)
except Exception:
return str(value)
@@ -1,74 +0,0 @@
"""Microstructure indicators: orderbook imbalance, slope, microprice.
Tutte le funzioni accettano bids/asks come list[list[price, qty]] (formato
standard dei ticker exchange) e ritornano metriche aggregate exchange-agnostic.
"""
from __future__ import annotations
def orderbook_imbalance(
bids: list[list[float]],
asks: list[list[float]],
depth: int = 10,
) -> dict[str, float | None]:
"""Imbalance ratio = (bid_vol - ask_vol) / (bid_vol + ask_vol) sui top-`depth`
livelli. Range [-1, +1]. Positivo = bid pressure, negativo = ask pressure.
Microprice (Stoll-Bertsimas): mid pesato dalla size opposta
→ P_micro = (P_bid * Q_ask + P_ask * Q_bid) / (Q_bid + Q_ask). Best level only.
Slope: variazione cumulata di volume per unità di prezzo (proxy per
liquidità in profondità).
"""
if not bids and not asks:
return {
"imbalance_ratio": None,
"bid_volume": 0.0,
"ask_volume": 0.0,
"microprice": None,
"bid_slope": None,
"ask_slope": None,
}
top_bids = bids[:depth]
top_asks = asks[:depth]
bid_vol = sum(q for _, q in top_bids)
ask_vol = sum(q for _, q in top_asks)
total = bid_vol + ask_vol
ratio = None if total == 0 else (bid_vol - ask_vol) / total
# Microprice: best bid, best ask. Weighted by opposite-side size.
microprice = None
if top_bids and top_asks:
bp, bq = top_bids[0]
ap, aq = top_asks[0]
denom = bq + aq
if denom > 0:
microprice = (bp * aq + ap * bq) / denom
bid_slope = _depth_slope(top_bids, ascending_price=False)
ask_slope = _depth_slope(top_asks, ascending_price=True)
return {
"imbalance_ratio": ratio,
"bid_volume": bid_vol,
"ask_volume": ask_vol,
"microprice": microprice,
"bid_slope": bid_slope,
"ask_slope": ask_slope,
}
def _depth_slope(levels: list[list[float]], ascending_price: bool) -> float | None:
"""Calcola |Δq / Δp| sul primo vs penultimo livello.
Slope alto = liquidità che crolla rapidamente in profondità (book sottile).
"""
if len(levels) < 2:
return None
p_first, q_first = levels[0]
p_last, q_last = levels[-1]
dp = abs(p_last - p_first)
if dp == 0:
return None
return abs(q_first - q_last) / dp
-201
View File
@@ -1,201 +0,0 @@
"""Logiche option-flow exchange-agnostiche.
Ogni funzione accetta una lista di "legs" (dizionari) con i campi rilevanti
e ritorna metriche aggregate. La normalizzazione exchange-specific dei dati
spetta al chiamante (es. mcp-deribit costruisce le legs da chain + ticker).
"""
from __future__ import annotations
from typing import Any
# Convention dealer gamma: i dealer sono SHORT calls (le vendono al retail) e
# LONG puts. Quando spot sale e dealer sono short calls, comprano underlying
# (positive feedback → vol amplificata). Quando spot scende e dealer long puts,
# vendono underlying (positive feedback). Net dealer gamma negativo → mercato
# instabile (squeeze in entrambe le direzioni).
def oi_weighted_skew(legs: list[dict[str, Any]]) -> dict[str, float | int | None]:
"""Skew aggregato pesato per OI: IV media puts - IV media calls.
Positivo = puts richer (paura), negativo = calls richer (greed).
"""
call_num = call_den = 0.0
put_num = put_den = 0.0
for leg in legs:
iv = leg.get("iv")
oi = leg.get("oi") or 0
if iv is None or oi <= 0:
continue
if leg.get("option_type") == "call":
call_num += iv * oi
call_den += oi
elif leg.get("option_type") == "put":
put_num += iv * oi
put_den += oi
call_iv = call_num / call_den if call_den > 0 else None
put_iv = put_num / put_den if put_den > 0 else None
skew = (put_iv - call_iv) if (call_iv is not None and put_iv is not None) else None
return {
"skew": skew,
"call_iv_weighted": call_iv,
"put_iv_weighted": put_iv,
"total_oi": int(call_den + put_den),
}
def smile_asymmetry(legs: list[dict[str, Any]], spot: float) -> dict[str, float | None]:
"""Smile asymmetry: differenza media IV otm puts vs otm calls a parità
di moneyness. Positivo = put-side richer (skew negativo classico equity).
"""
if spot <= 0 or not legs:
return {"atm_iv": None, "asymmetry": None, "otm_put_iv": None, "otm_call_iv": None}
# ATM IV: media IV strike entro ±1% da spot
atm_ivs = [leg["iv"] for leg in legs if leg.get("iv") is not None and abs(leg.get("strike", 0) - spot) / spot < 0.01]
atm_iv = sum(atm_ivs) / len(atm_ivs) if atm_ivs else None
otm_put_ivs = [
leg["iv"] for leg in legs
if leg.get("iv") is not None and leg.get("option_type") == "put" and leg.get("strike", 0) < spot * 0.95
]
otm_call_ivs = [
leg["iv"] for leg in legs
if leg.get("iv") is not None and leg.get("option_type") == "call" and leg.get("strike", 0) > spot * 1.05
]
otm_put = sum(otm_put_ivs) / len(otm_put_ivs) if otm_put_ivs else None
otm_call = sum(otm_call_ivs) / len(otm_call_ivs) if otm_call_ivs else None
asym = (otm_put - otm_call) if (otm_put is not None and otm_call is not None) else None
return {
"atm_iv": atm_iv,
"asymmetry": asym,
"otm_put_iv": otm_put,
"otm_call_iv": otm_call,
}
def atm_vs_wings_vol(legs: list[dict[str, Any]], spot: float) -> dict[str, float | None]:
"""IV ATM vs IV alle ali 25-delta. Wing richness > 0 → smile (kurtosis vol).
"""
if not legs:
return {"atm_iv": None, "wing_25d_call_iv": None, "wing_25d_put_iv": None, "wing_richness": None}
def _closest(target_delta: float, opt_type: str, tol: float = 0.1) -> float | None:
best = None
best_dist = float("inf")
for leg in legs:
d = leg.get("delta")
iv = leg.get("iv")
if d is None or iv is None or leg.get("option_type") != opt_type:
continue
dist = abs(abs(d) - abs(target_delta))
if dist < best_dist:
best_dist = dist
best = iv
return best if best_dist <= tol else None
# ATM IV: leg con delta più vicino a 0.5 (call) o -0.5 (put)
atm_call_iv = _closest(0.5, "call")
atm_put_iv = _closest(-0.5, "put")
atm_ivs = [v for v in (atm_call_iv, atm_put_iv) if v is not None]
atm_iv = sum(atm_ivs) / len(atm_ivs) if atm_ivs else None
wing_call = _closest(0.25, "call")
wing_put = _closest(-0.25, "put")
wing_avg = None
if wing_call is not None and wing_put is not None:
wing_avg = (wing_call + wing_put) / 2
richness = (wing_avg - atm_iv) if (wing_avg is not None and atm_iv is not None) else None
return {
"atm_iv": atm_iv,
"wing_25d_call_iv": wing_call,
"wing_25d_put_iv": wing_put,
"wing_richness": richness,
}
def dealer_gamma_profile(
legs: list[dict[str, Any]],
spot: float,
) -> dict[str, Any]:
"""Net dealer gamma per strike (assume dealer short calls + long puts).
Restituisce per strike: call_dealer_gamma (negativo), put_dealer_gamma
(positivo), net. Aggregato totale + zero-cross strike (gamma flip).
"""
by_strike: dict[float, dict[str, float]] = {}
for leg in legs:
strike = leg.get("strike")
gamma = leg.get("gamma")
oi = leg.get("oi") or 0
if strike is None or gamma is None or oi <= 0 or spot <= 0:
continue
contrib = float(gamma) * oi * (spot ** 2) * 0.01
entry = by_strike.setdefault(
float(strike),
{"strike": float(strike), "call_dealer_gamma": 0.0, "put_dealer_gamma": 0.0},
)
if leg.get("option_type") == "call":
entry["call_dealer_gamma"] -= contrib # dealer short calls
elif leg.get("option_type") == "put":
entry["put_dealer_gamma"] += contrib # dealer long puts
rows: list[dict[str, float]] = []
for s in sorted(by_strike.keys()):
e = by_strike[s]
e["net_dealer_gamma"] = e["call_dealer_gamma"] + e["put_dealer_gamma"]
rows.append(e)
flip_level = None
for a, b in zip(rows, rows[1:], strict=False):
if (a["net_dealer_gamma"] < 0 <= b["net_dealer_gamma"]) or (
a["net_dealer_gamma"] > 0 >= b["net_dealer_gamma"]
):
denom = b["net_dealer_gamma"] - a["net_dealer_gamma"]
if denom != 0:
frac = -a["net_dealer_gamma"] / denom
flip_level = round(a["strike"] + frac * (b["strike"] - a["strike"]), 2)
break
total = sum(r["net_dealer_gamma"] for r in rows)
return {
"by_strike": [
{
"strike": r["strike"],
"call_dealer_gamma": round(r["call_dealer_gamma"], 2),
"put_dealer_gamma": round(r["put_dealer_gamma"], 2),
"net_dealer_gamma": round(r["net_dealer_gamma"], 2),
}
for r in rows
],
"total_net_dealer_gamma": round(total, 2),
"gamma_flip_level": flip_level,
}
def vanna_charm_aggregate(
    legs: list[dict[str, Any]],
    spot: float,
) -> dict[str, Any]:
    """OI-weighted aggregate vanna (∂delta/∂IV) and charm (∂delta/∂t).

    Calls contribute with sign +1, puts with -1; legs missing vanna or charm,
    or with non-positive OI, are skipped entirely.
    """
    agg_vanna = 0.0
    agg_charm = 0.0
    counted = 0
    for leg in legs:
        vanna = leg.get("vanna")
        charm = leg.get("charm")
        oi = leg.get("oi") or 0
        if vanna is None or charm is None or oi <= 0:
            continue
        side = 1 if leg.get("option_type") == "call" else -1
        agg_vanna += float(vanna) * oi * side
        agg_charm += float(charm) * oi * side
        counted += 1
    return {
        "total_vanna": agg_vanna,
        "total_charm": agg_charm,
        "legs_analyzed": counted,
        "spot": spot,
    }
-220
View File
@@ -1,220 +0,0 @@
from __future__ import annotations
import json
import os
import time
import uuid
from collections.abc import Callable
from contextlib import AbstractAsyncContextManager
from datetime import UTC, datetime
from typing import Any
from fastapi import FastAPI, HTTPException, Request
from fastapi.exceptions import RequestValidationError
from fastapi.responses import JSONResponse, Response
from starlette.middleware.base import BaseHTTPMiddleware
from mcp_common.auth import TokenStore
Lifespan = Callable[[FastAPI], AbstractAsyncContextManager[None]]
def _error_envelope(
*,
type_: str,
code: str,
message: str,
retryable: bool,
suggested_fix: str | None = None,
details: dict | None = None,
request_id: str | None = None,
) -> dict:
env: dict[str, Any] = {
"error": {
"type": type_,
"code": code,
"message": message,
"retryable": retryable,
},
"request_id": request_id or uuid.uuid4().hex,
"data_timestamp": datetime.now(UTC).isoformat(),
}
if suggested_fix:
env["error"]["suggested_fix"] = suggested_fix
if details:
env["error"]["details"] = details
return env
class _TimestampInjectorMiddleware(BaseHTTPMiddleware):
    """CER-P5-001: inject data_timestamp into tool responses.

    - Dict response: body gains `data_timestamp` if missing.
    - List of dicts: each item gains `data_timestamp` if missing.
    - Header `X-Data-Timestamp` is always set (universal fallback for lists
      of primitives).
    Skips /health (already populated), /mcp (JSON-RPC bridge) and non-JSON
    responses.
    """
    async def dispatch(self, request: Request, call_next):
        response = await call_next(request)
        path = request.url.path
        # Only tool endpoints are decorated; everything else passes through.
        if not path.startswith("/tools/"):
            return response
        ctype = response.headers.get("content-type", "")
        if "application/json" not in ctype:
            return response
        # Drain the (possibly streaming) body so it can be inspected and
        # re-emitted as a plain Response below.
        body = b""
        async for chunk in response.body_iterator:
            body += chunk
        ts = datetime.now(UTC).isoformat()
        try:
            data = json.loads(body) if body else None
        except Exception:
            # Unparseable JSON: pass the original bytes through unchanged,
            # but still expose the timestamp via header.
            headers = dict(response.headers)
            headers["X-Data-Timestamp"] = ts
            return Response(
                content=body,
                status_code=response.status_code,
                headers=headers,
                media_type=response.media_type,
            )
        modified = False
        if isinstance(data, dict) and "data_timestamp" not in data:
            data["data_timestamp"] = ts
            modified = True
        elif isinstance(data, list):
            for item in data:
                if isinstance(item, dict) and "data_timestamp" not in item:
                    item["data_timestamp"] = ts
                    modified = True
        headers = dict(response.headers)
        headers["X-Data-Timestamp"] = ts
        if modified:
            new_body = json.dumps(data, default=str).encode()
            # The body length changed: drop the stale header so the server
            # recomputes content-length.
            headers.pop("content-length", None)
            return Response(
                content=new_body,
                status_code=response.status_code,
                headers=headers,
                media_type="application/json",
            )
        return Response(
            content=body,
            status_code=response.status_code,
            headers=headers,
            media_type=response.media_type,
        )
def build_app(
    *,
    name: str,
    version: str,
    token_store: TokenStore,
    lifespan: Lifespan | None = None,
) -> FastAPI:
    """Create the standard service FastAPI app.

    Wires up: the /health endpoint, the data_timestamp and latency
    middleware, and the CER-P5-002 error-envelope exception handlers.

    Args:
        app state exposes `token_store` and `boot_at` (epoch seconds).
        name: service name, used as FastAPI title and in /health.
        version: service version string.
        token_store: shared token store for the REST tools.
        lifespan: optional FastAPI lifespan context manager.
    """
    root_path = os.getenv("ROOT_PATH", "")
    app = FastAPI(title=name, version=version, root_path=root_path, lifespan=lifespan)
    app.state.token_store = token_store
    app.state.boot_at = time.time()
    app.add_middleware(_TimestampInjectorMiddleware)
    @app.middleware("http")
    async def _latency_header(request: Request, call_next):
        # Wall-clock request duration, exposed for client-side observability.
        t0 = time.perf_counter()
        response = await call_next(request)
        dur_ms = (time.perf_counter() - t0) * 1000
        response.headers["X-Duration-Ms"] = f"{dur_ms:.2f}"
        return response
    # CER-P5-002 error envelope: global exception handlers
    @app.exception_handler(HTTPException)
    async def _http_exc(request: Request, exc: HTTPException):
        # Only transient statuses are marked retryable.
        retryable = exc.status_code in (408, 429, 502, 503, 504)
        code_map = {
            400: "BAD_REQUEST", 401: "UNAUTHORIZED", 403: "FORBIDDEN",
            404: "NOT_FOUND", 408: "TIMEOUT", 409: "CONFLICT",
            422: "VALIDATION_ERROR", 429: "RATE_LIMIT",
            500: "INTERNAL_ERROR", 502: "UPSTREAM_ERROR",
            503: "UNAVAILABLE", 504: "GATEWAY_TIMEOUT",
        }
        code = code_map.get(exc.status_code, f"HTTP_{exc.status_code}")
        message = "HTTP error"
        details: dict | None = None
        detail = exc.detail
        # Preserve rail-style detail {"error": "..", "message": ".."} as code
        if isinstance(detail, dict):
            if isinstance(detail.get("error"), str):
                code = detail["error"].upper()
            message = str(detail.get("message") or detail.get("error") or message)
            details = detail
        elif isinstance(detail, str):
            message = detail
        return JSONResponse(
            status_code=exc.status_code,
            content=_error_envelope(
                type_="http_error",
                code=code,
                message=message,
                retryable=retryable,
                details=details,
            ),
        )
    @app.exception_handler(RequestValidationError)
    async def _validation_exc(request: Request, exc: RequestValidationError):
        errs = exc.errors()
        first_loc = ".".join(str(x) for x in errs[0]["loc"]) if errs else "body"
        suggestion = (
            f"check field '{first_loc}': "
            + (errs[0]["msg"] if errs else "invalid input")
        )
        # Sanitize ctx values: pydantic v2 may put a ValueError in
        # ctx['error'], which is not JSON-serializable. Reduce to strings.
        safe_errs: list[dict] = []
        for e in errs[:5]:
            ne: dict = {}
            for k, v in e.items():
                if k == "ctx" and isinstance(v, dict):
                    ne[k] = {ck: str(cv) for ck, cv in v.items()}
                else:
                    ne[k] = v
            safe_errs.append(ne)
        return JSONResponse(
            status_code=422,
            content=_error_envelope(
                type_="validation_error",
                code="INVALID_INPUT",
                message=f"request body validation failed on {first_loc}",
                retryable=False,
                suggested_fix=suggestion,
                details={"errors": safe_errs},
            ),
        )
    @app.exception_handler(Exception)
    async def _unhandled(request: Request, exc: Exception):
        # Last-resort handler: unhandled exceptions become a 500 envelope
        # with a truncated message, marked retryable.
        return JSONResponse(
            status_code=500,
            content=_error_envelope(
                type_="internal_error",
                code="UNHANDLED_EXCEPTION",
                message=f"{type(exc).__name__}: {str(exc)[:300]}",
                retryable=True,
            ),
        )
    @app.get("/health")
    def health():
        # Liveness probe payload; uptime is derived from boot_at set above.
        return {
            "status": "healthy",
            "name": name,
            "version": version,
            "uptime_seconds": int(time.time() - app.state.boot_at),
            "data_timestamp": datetime.now(UTC).isoformat(),
        }
    return app
-96
View File
@@ -1,96 +0,0 @@
"""Test statistici puri (cointegration, ADF, half-life già in indicators).
Nessuna dipendenza esterna: pure-Python.
"""
from __future__ import annotations
import math
def _ols_slope_intercept(xs: list[float], ys: list[float]) -> tuple[float, float] | None:
if len(xs) != len(ys) or len(xs) < 3:
return None
n = len(xs)
mx = sum(xs) / n
my = sum(ys) / n
num = sum((xs[i] - mx) * (ys[i] - my) for i in range(n))
den = sum((xs[i] - mx) ** 2 for i in range(n))
if den == 0:
return None
slope = num / den
intercept = my - slope * mx
return slope, intercept
def _adf_t_stat(series: list[float]) -> float | None:
    """Simplified Augmented Dickey-Fuller statistic (lag=0, i.e. plain DF).

    Regresses Δy_t = a + b*y_{t-1} + eps and returns the t-statistic of b
    against zero. More negative = more stationary; ~-2.86 approximates the
    5% critical value. Requires at least 30 observations; returns None on
    short or degenerate input.
    """
    if len(series) < 30:
        return None
    lagged = series[:-1]
    diffs = [cur - prev for prev, cur in zip(series, series[1:])]
    fit = _ols_slope_intercept(lagged, diffs)
    if fit is None:
        return None
    b, a = fit
    n = len(lagged)
    mean_lag = sum(lagged) / n
    sxx = sum((x - mean_lag) ** 2 for x in lagged)
    if sxx == 0:
        return None
    residuals = [diffs[i] - (a + b * lagged[i]) for i in range(n)]
    rss = sum(r * r for r in residuals)
    dof = n - 2
    if dof <= 0:
        return None
    se_b = math.sqrt((rss / dof) / sxx)
    if se_b == 0:
        return None
    return b / se_b
def cointegration_test(
    series_a: list[float],
    series_b: list[float],
    significance_t: float = -2.86,
) -> dict[str, float | bool | None]:
    """Engle-Granger two-step cointegration test.

    1. OLS: a_t = alpha + beta * b_t + eps
    2. ADF on the residual spread; t-stat < ``significance_t``
       (-2.86 ≈ 5% critical value) → cointegrated.

    Returns a dict with keys cointegrated/beta/alpha/adf_t_stat/
    spread_mean/spread_std; every value is None when the inputs are
    mismatched in length, too short (< 50 points) or degenerate
    (zero variance in series_b).
    """
    # Single source of truth for the "cannot decide" result — the original
    # duplicated this 6-key literal in two early returns.
    indeterminate: dict[str, float | bool | None] = {
        "cointegrated": None,
        "beta": None,
        "alpha": None,
        "adf_t_stat": None,
        "spread_mean": None,
        "spread_std": None,
    }
    if len(series_a) != len(series_b) or len(series_a) < 50:
        return indeterminate
    res = _ols_slope_intercept(series_b, series_a)
    if res is None:
        return indeterminate
    beta, alpha = res
    spread = [a - alpha - beta * b for a, b in zip(series_a, series_b)]
    t_stat = _adf_t_stat(spread)
    cointegrated = t_stat is not None and t_stat < significance_t
    n = len(spread)
    mean = sum(spread) / n
    # Sample variance (n-1 denominator); 0.0 for a single-point spread.
    var = sum((s - mean) ** 2 for s in spread) / (n - 1) if n > 1 else 0.0
    return {
        "cointegrated": cointegrated,
        "beta": beta,
        "alpha": alpha,
        "adf_t_stat": t_stat,
        "spread_mean": mean,
        "spread_std": math.sqrt(var),
    }
-137
View File
@@ -1,137 +0,0 @@
from __future__ import annotations
import json
from unittest.mock import MagicMock, patch
import pytest
from mcp_common.app_factory import ExchangeAppSpec, run_exchange_main
from mcp_common.environment import EnvironmentInfo
def _make_spec(build_client=None, build_app=None) -> ExchangeAppSpec:
    """Build a minimal ExchangeAppSpec for a fictional 'testex' exchange."""
    def default_client(creds, env_info):
        return MagicMock(name="client")

    def default_app(**kwargs):
        return MagicMock(name="app")

    return ExchangeAppSpec(
        exchange="testex",
        creds_env_var="TESTEX_CREDENTIALS_FILE",
        env_var="TESTEX_TESTNET",
        flag_key="testnet",
        default_base_url_live="https://api.testex.com",
        default_base_url_testnet="https://test.testex.com",
        default_port=9999,
        build_client=build_client if build_client is not None else default_client,
        build_app=build_app if build_app is not None else default_app,
    )
def test_run_exchange_main_loads_creds_and_resolves_env(tmp_path, monkeypatch):
    """Creds file + defaults flow into build_client/build_app; PORT env wins."""
    creds_path = tmp_path / "creds.json"
    creds_path.write_text(json.dumps({"api_key": "k", "api_secret": "s"}))
    monkeypatch.setenv("TESTEX_CREDENTIALS_FILE", str(creds_path))
    monkeypatch.setenv("PORT", "10000")
    monkeypatch.delenv("TESTEX_TESTNET", raising=False)
    seen: dict = {}

    def capture_client(creds, env_info):
        seen["creds"] = creds
        seen["env_info"] = env_info
        return MagicMock()

    def capture_app(**kwargs):
        seen["app_kwargs"] = kwargs
        return MagicMock()

    spec = _make_spec(build_client=capture_client, build_app=capture_app)
    with patch("mcp_common.app_factory.uvicorn.run") as mock_run:
        run_exchange_main(spec)
    creds = seen["creds"]
    assert creds["api_key"] == "k"
    assert creds["base_url_live"] == "https://api.testex.com"
    assert creds["base_url_testnet"] == "https://test.testex.com"
    env_info = seen["env_info"]
    assert isinstance(env_info, EnvironmentInfo)
    assert env_info.environment == "testnet"
    assert env_info.exchange == "testex"
    for key in ("client", "token_store", "creds", "env_info"):
        assert key in seen["app_kwargs"]
    assert mock_run.call_args.kwargs["port"] == 10000  # PORT override
def test_run_exchange_main_uses_default_port(tmp_path, monkeypatch):
    """Without a PORT env var, the spec's default_port is used."""
    creds_path = tmp_path / "creds.json"
    creds_path.write_text(json.dumps({}))
    monkeypatch.setenv("TESTEX_CREDENTIALS_FILE", str(creds_path))
    monkeypatch.delenv("PORT", raising=False)
    with patch("mcp_common.app_factory.uvicorn.run") as mock_run:
        run_exchange_main(_make_spec())
    assert mock_run.call_args.kwargs["port"] == 9999
def test_run_exchange_main_env_var_overrides_creds(tmp_path, monkeypatch):
    """TESTEX_TESTNET=false beats creds.testnet=True → mainnet from env."""
    creds_path = tmp_path / "creds.json"
    # Explicit `environment: mainnet` because the env-var override → mainnet
    # and consistency_check demands confirmation to avoid accidental switches.
    creds_path.write_text(json.dumps({"testnet": True, "environment": "mainnet"}))
    monkeypatch.setenv("TESTEX_CREDENTIALS_FILE", str(creds_path))
    monkeypatch.setenv("TESTEX_TESTNET", "false")
    seen: dict = {}

    def capture_client(creds, env_info):
        seen["env_info"] = env_info
        return MagicMock()

    with patch("mcp_common.app_factory.uvicorn.run"):
        run_exchange_main(_make_spec(build_client=capture_client))
    # env var "false" overrides creds.testnet=True → mainnet
    assert seen["env_info"].environment == "mainnet"
    assert seen["env_info"].source == "env"
def test_run_exchange_main_aborts_on_mainnet_without_confirmation(tmp_path, monkeypatch):
    """Mainnet without creds['environment']='mainnet' → fail-fast boot abort."""
    from mcp_common.environment import EnvironmentMismatchError

    creds_path = tmp_path / "creds.json"
    creds_path.write_text(json.dumps({"testnet": False}))
    monkeypatch.setenv("TESTEX_CREDENTIALS_FILE", str(creds_path))
    for var in ("TESTEX_TESTNET", "STRICT_MAINNET"):
        monkeypatch.delenv(var, raising=False)
    with (
        pytest.raises(EnvironmentMismatchError),
        patch("mcp_common.app_factory.uvicorn.run"),
    ):
        run_exchange_main(_make_spec())
def test_run_exchange_main_strict_mainnet_disabled_via_env(tmp_path, monkeypatch):
    """STRICT_MAINNET=false allows unconfirmed mainnet (warning only)."""
    creds_path = tmp_path / "creds.json"
    creds_path.write_text(json.dumps({"testnet": False}))
    monkeypatch.setenv("TESTEX_CREDENTIALS_FILE", str(creds_path))
    monkeypatch.setenv("STRICT_MAINNET", "false")
    with patch("mcp_common.app_factory.uvicorn.run"):
        run_exchange_main(_make_spec())  # must not raise
def test_run_exchange_main_missing_creds_file_exits(monkeypatch):
    """Missing TESTEX_CREDENTIALS_FILE env var → SystemExit(2) fail-fast."""
    monkeypatch.delenv("TESTEX_CREDENTIALS_FILE", raising=False)
    spec = _make_spec()
    # pytest is imported at module level; the old function-local re-import
    # was redundant and has been dropped.
    with pytest.raises(SystemExit) as exc_info:
        run_exchange_main(spec)
    assert exc_info.value.code == 2
-155
View File
@@ -1,155 +0,0 @@
from __future__ import annotations
import logging
import pytest
from mcp_common import audit as audit_mod
from mcp_common.audit import audit_write_op
from mcp_common.auth import Principal
@pytest.fixture
def captured_records(monkeypatch):
    """Capture records emitted by the mcp.audit logger.

    The real logger has propagate=False, which blocks caplog, so the module
    logger is swapped for a throwaway one holding a list-collecting handler.
    """
    bucket: list[logging.LogRecord] = []

    class _Collector(logging.Handler):
        def emit(self, record: logging.LogRecord) -> None:
            bucket.append(record)

    fake_logger = logging.getLogger("mcp.audit.test")
    fake_logger.handlers.clear()
    fake_logger.addHandler(_Collector())
    fake_logger.setLevel(logging.DEBUG)
    fake_logger.propagate = False
    monkeypatch.setattr(audit_mod, "_logger", fake_logger)
    return bucket
def test_audit_write_op_emits_structured_record(captured_records):
    """A successful write op emits one record with structured attributes."""
    audit_write_op(
        principal=Principal("core", {"core"}),
        action="place_order",
        exchange="deribit",
        target="BTC-PERPETUAL",
        payload={"side": "buy", "amount": 10, "leverage": 3},
        result={"order_id": "abc", "state": "open"},
    )
    assert len(captured_records) == 1
    record = captured_records[0]
    assert record.action == "place_order"
    assert record.exchange == "deribit"
    assert record.target == "BTC-PERPETUAL"
    assert record.principal == "core"
    assert record.payload == {"side": "buy", "amount": 10, "leverage": 3}
    assert record.result == {"order_id": "abc", "state": "open"}
def test_audit_write_op_error_uses_error_level(captured_records):
    """An op reporting an error is logged at ERROR with the error attribute."""
    audit_write_op(
        principal=Principal("core", {"core"}),
        action="cancel_order",
        exchange="bybit",
        target="ord-123",
        payload={},
        error="not_found",
    )
    assert len(captured_records) == 1
    record = captured_records[0]
    assert record.levelname == "ERROR"
    assert record.error == "not_found"
def test_audit_write_op_summarizes_result_fields(captured_records):
    """Oversized/unknown result fields are dropped; lists become counts."""
    bulky = {
        "order_id": "ord-1",
        "state": "submitted",
        "extra_huge_field": "x" * 10000,
        "orders": [{"id": 1}, {"id": 2}, {"id": 3}],
    }
    audit_write_op(
        principal=Principal("core", {"core"}),
        action="place_combo_order",
        exchange="bybit",
        payload={},
        result=bulky,
    )
    record = captured_records[0]
    assert "extra_huge_field" not in record.result
    assert record.result["order_id"] == "ord-1"
    assert record.result["orders_count"] == 3
def test_audit_write_op_no_principal(captured_records):
    """principal=None is tolerated and recorded as None."""
    audit_write_op(
        principal=None,
        action="place_order",
        exchange="alpaca",
        payload={},
    )
    assert captured_records[0].principal is None
def test_audit_write_op_writes_to_file_when_AUDIT_LOG_FILE_set(tmp_path, monkeypatch):
    """With env AUDIT_LOG_FILE set, one JSON line appears in the file."""
    import json
    # Hoisted: the original imported TimedRotatingFileHandler inside the loop
    # body (re-executed per iteration) and redundantly re-imported audit_mod,
    # which is already imported at module level.
    from logging.handlers import TimedRotatingFileHandler

    audit_file = tmp_path / "audit.jsonl"
    monkeypatch.setenv("AUDIT_LOG_FILE", str(audit_file))
    # Reset the idempotency flag so this test re-runs the handler setup.
    audit_mod._file_handler_attached = False
    # Drop pre-existing file handlers (they may point at an old path).
    for h in list(audit_mod._logger.handlers):
        if isinstance(h, TimedRotatingFileHandler):
            audit_mod._logger.removeHandler(h)
    audit_write_op(
        principal=Principal("core", {"core"}),
        action="place_order",
        exchange="bybit",
        target="BTCUSDT",
        payload={"side": "Buy", "qty": 0.01},
        result={"order_id": "abc123", "status": "submitted"},
    )
    # Force a flush of the file handlers.
    for h in audit_mod._logger.handlers:
        h.flush()
    assert audit_file.exists()
    content = audit_file.read_text().strip()
    assert content, "audit file empty"
    record = json.loads(content.splitlines()[-1])
    assert record["audit_event"] == "write_op"
    assert record["action"] == "place_order"
    assert record["exchange"] == "bybit"
    assert record["target"] == "BTCUSDT"
    assert record["principal"] == "core"
def test_audit_no_file_when_env_unset(tmp_path, monkeypatch):
    """Without AUDIT_LOG_FILE, no audit file is created."""
    # audit_mod is imported at module level; the old local re-import was redundant.
    monkeypatch.delenv("AUDIT_LOG_FILE", raising=False)
    audit_mod._file_handler_attached = False
    audit_write_op(
        principal=Principal("core", {"core"}),
        action="cancel_order",
        exchange="bybit",
        target="ord-1",
        payload={},
    )
    # Nothing was written under tmp_path.
    assert list(tmp_path.iterdir()) == []
-84
View File
@@ -1,84 +0,0 @@
import pytest
from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient
from mcp_common.auth import (
Principal,
TokenStore,
acl_requires,
require_principal,
)
@pytest.fixture
def token_store():
    """Two principals: 'core' (write caps) and 'observer' (read-only)."""
    principals = {
        "token-core-123": Principal(name="core", capabilities={"core"}),
        "token-obs-456": Principal(name="observer", capabilities={"observer"}),
    }
    return TokenStore(tokens=principals)
@pytest.fixture
def app(token_store):
    """FastAPI app with one public route plus authed/ACL-guarded routes."""
    application = FastAPI()
    application.state.token_store = token_store

    @application.get("/public")
    def public():
        return {"ok": True}

    @application.get("/private")
    def private(principal: Principal = Depends(require_principal)):
        return {"name": principal.name}

    @application.post("/core-only")
    @acl_requires(core=True, observer=False)
    def core_only(principal: Principal = Depends(require_principal)):
        return {"who": principal.name}

    @application.post("/observer-only")
    @acl_requires(core=False, observer=True)
    def observer_only(principal: Principal = Depends(require_principal)):
        return {"who": principal.name}

    return application
def test_public_endpoint_no_auth(app):
    """Unauthenticated access to the public route succeeds."""
    response = TestClient(app).get("/public")
    assert response.status_code == 200
def test_private_without_header_401(app):
    """Missing Authorization header → 401."""
    response = TestClient(app).get("/private")
    assert response.status_code == 401
def test_private_bad_token_403(app):
    """An unknown bearer token → 403."""
    response = TestClient(app).get("/private", headers={"Authorization": "Bearer nope"})
    assert response.status_code == 403
def test_private_good_token_200(app):
    """A valid token yields 200 and the principal's name."""
    headers = {"Authorization": "Bearer token-core-123"}
    response = TestClient(app).get("/private", headers=headers)
    assert response.status_code == 200
    assert response.json() == {"name": "core"}
def test_acl_core_token_on_core_only_endpoint(app):
    """Core capability is accepted on a core-only route."""
    headers = {"Authorization": "Bearer token-core-123"}
    assert TestClient(app).post("/core-only", headers=headers).status_code == 200
def test_acl_observer_on_core_only_rejected(app):
    """Observer capability is rejected on a core-only route."""
    headers = {"Authorization": "Bearer token-obs-456"}
    assert TestClient(app).post("/core-only", headers=headers).status_code == 403
def test_acl_observer_on_observer_only_ok(app):
    """Observer capability is accepted on an observer-only route."""
    headers = {"Authorization": "Bearer token-obs-456"}
    assert TestClient(app).post("/observer-only", headers=headers).status_code == 200
-189
View File
@@ -1,189 +0,0 @@
from __future__ import annotations
import pytest
from mcp_common.environment import (
EnvironmentInfo,
EnvironmentMismatchError,
consistency_check,
resolve_environment,
)
def test_env_var_overrides_secret(monkeypatch):
    """The env var wins over the secret's testnet flag."""
    monkeypatch.setenv("DERIBIT_TESTNET", "false")
    creds = {"testnet": True, "base_url_live": "L", "base_url_testnet": "T"}
    info = resolve_environment(
        creds,
        env_var="DERIBIT_TESTNET",
        flag_key="testnet",
        exchange="deribit",
    )
    assert (info.environment, info.source) == ("mainnet", "env")
    assert info.env_value == "false"
    assert info.base_url == "L"
def test_secret_used_when_env_missing(monkeypatch):
    """With no env var, the secret's flag decides."""
    monkeypatch.delenv("DERIBIT_TESTNET", raising=False)
    creds = {"testnet": True, "base_url_live": "L", "base_url_testnet": "T"}
    info = resolve_environment(
        creds,
        env_var="DERIBIT_TESTNET",
        flag_key="testnet",
        exchange="deribit",
    )
    assert (info.environment, info.source) == ("testnet", "credentials")
    assert info.env_value is None
    assert info.base_url == "T"
def test_default_when_both_missing(monkeypatch):
    """No env var and no flag in creds → safe default: testnet."""
    monkeypatch.delenv("FOO_TESTNET", raising=False)
    info = resolve_environment(
        {"base_url_live": "L", "base_url_testnet": "T"},
        env_var="FOO_TESTNET",
        flag_key="testnet",
        exchange="foo",
    )
    assert (info.environment, info.source) == ("testnet", "default")
    assert info.env_value is None
@pytest.mark.parametrize("raw,expected", [
    ("1", "testnet"),
    ("true", "testnet"),
    ("yes", "testnet"),
    ("on", "testnet"),
    ("TRUE", "testnet"),
    ("0", "mainnet"),
    ("false", "mainnet"),
    ("no", "mainnet"),
    ("off", "mainnet"),
    ("garbage", "mainnet"),
])
def test_env_value_truthy_parsing(monkeypatch, raw, expected):
    """Truthy spellings → testnet; everything else (incl. garbage) → mainnet."""
    monkeypatch.setenv("X_TESTNET", raw)
    resolved = resolve_environment(
        {"base_url_live": "L", "base_url_testnet": "T"},
        env_var="X_TESTNET",
        flag_key="testnet",
        exchange="x",
    )
    assert resolved.environment == expected
def test_default_base_urls_applied_when_creds_missing(monkeypatch):
    """Missing base URLs are filled into creds in-place from the defaults."""
    monkeypatch.delenv("X_TESTNET", raising=False)
    creds: dict = {}
    info = resolve_environment(
        creds,
        env_var="X_TESTNET",
        flag_key="testnet",
        exchange="x",
        default_base_url_live="https://live.example",
        default_base_url_testnet="https://test.example",
    )
    assert info.base_url == "https://test.example"
    assert creds["base_url_live"] == "https://live.example"
    assert creds["base_url_testnet"] == "https://test.example"
def test_creds_base_urls_override_defaults(monkeypatch):
    """URLs already present in creds win over the supplied defaults."""
    monkeypatch.delenv("X_TESTNET", raising=False)
    creds = {"base_url_live": "L", "base_url_testnet": "T"}
    info = resolve_environment(
        creds,
        env_var="X_TESTNET",
        flag_key="testnet",
        exchange="x",
        default_base_url_live="https://live.example",
        default_base_url_testnet="https://test.example",
    )
    assert info.base_url == "T"
    assert creds["base_url_live"] == "L"
def test_alpaca_paper_flag_key(monkeypatch):
    """Alpaca uses 'paper' instead of 'testnet' as the secret flag."""
    monkeypatch.delenv("ALPACA_PAPER", raising=False)
    creds = {"paper": False, "base_url_live": "L", "base_url_testnet": "T"}
    info = resolve_environment(
        creds,
        env_var="ALPACA_PAPER",
        flag_key="paper",
        exchange="alpaca",
    )
    assert (info.environment, info.source) == ("mainnet", "credentials")
# ───────── consistency_check ─────────
def _info(env: str, exchange: str = "deribit") -> EnvironmentInfo:
    """Build an EnvironmentInfo for the consistency_check tests."""
    mainnet = env == "mainnet"
    host = f"https://api.{exchange}.com" if mainnet else f"https://test.{exchange}.com"
    return EnvironmentInfo(
        exchange=exchange,
        environment=env,
        source="env",
        env_value="false" if mainnet else "true",
        base_url=host,
    )
def test_consistency_check_testnet_no_confirmation_ok():
    """Testnet needs no explicit confirmation → no warnings (safe default)."""
    creds = {"api_key": "k", "api_secret": "s"}
    assert consistency_check(_info("testnet"), creds) == []
def test_consistency_check_mainnet_no_confirmation_raises():
    """Mainnet without explicit creds['environment']='mainnet' → fail-fast."""
    creds = {"api_key": "k", "api_secret": "s"}
    with pytest.raises(EnvironmentMismatchError, match="mainnet.*explicit confirmation"):
        consistency_check(_info("mainnet"), creds)
def test_consistency_check_mainnet_with_confirmation_ok():
    """Explicitly confirmed mainnet passes with no warnings."""
    creds = {"api_key": "k", "api_secret": "s", "environment": "mainnet"}
    assert consistency_check(_info("mainnet"), creds) == []
def test_consistency_check_explicit_mismatch_raises():
    """Secret declares mainnet but the resolver picked testnet → fail-fast."""
    with pytest.raises(EnvironmentMismatchError, match="declared.*resolved"):
        consistency_check(_info("testnet"), {"environment": "mainnet"})
def test_consistency_check_strict_mainnet_disabled():
    """strict_mainnet=False: unconfirmed mainnet warns instead of raising."""
    creds = {"api_key": "k", "api_secret": "s"}
    warnings = consistency_check(_info("mainnet"), creds, strict_mainnet=False)
    assert any("mainnet" in w for w in warnings)
def test_consistency_check_url_does_not_match_environment_warns():
    """Base URL contains 'test' while environment='mainnet' → warning."""
    # EnvironmentInfo is already imported at module level; the old
    # function-local re-import was redundant and has been dropped.
    info = EnvironmentInfo(
        exchange="bybit",
        environment="mainnet",
        source="env",
        env_value="false",
        base_url="https://api-testnet.bybit.com",  # url SAYS testnet but resolver says MAINNET
    )
    warnings = consistency_check(info, {"environment": "mainnet"})
    assert any("base_url" in w.lower() for w in warnings)
-72
View File
@@ -1,72 +0,0 @@
from __future__ import annotations
import asyncio
import httpx
import pytest
from mcp_common.http import async_client, call_with_retry
def test_async_client_uses_retry_transport():
    """async_client wires an AsyncHTTPTransport with the requested retries."""
    client = async_client(retries=5)
    transport = client._transport
    assert isinstance(transport, httpx.AsyncHTTPTransport)
    # internal _retries knob on the transport's pool
    assert transport._pool._retries == 5
@pytest.mark.asyncio
async def test_call_with_retry_succeeds_first_try():
    """A call that succeeds immediately is invoked exactly once."""
    attempts = 0

    async def fn():
        nonlocal attempts
        attempts += 1
        return "ok"

    assert await call_with_retry(fn) == "ok"
    assert attempts == 1
@pytest.mark.asyncio
async def test_call_with_retry_recovers_after_transient(monkeypatch):
    """Transient ConnectErrors are retried until the call succeeds."""
    # Stub out the backoff sleep directly. The old line used a dead
    # conditional (`asyncio.coroutine(...) if False else _no_sleep`) whose
    # never-taken branch referenced asyncio.coroutine — removed in 3.11.
    monkeypatch.setattr(asyncio, "sleep", _no_sleep)
    calls = 0

    async def fn():
        nonlocal calls
        calls += 1
        if calls < 3:
            raise httpx.ConnectError("boom")
        return "ok"

    result = await call_with_retry(fn, max_attempts=5, base_delay=0.0)
    assert result == "ok"
    assert calls == 3
async def _no_sleep(_):
return None
@pytest.mark.asyncio
async def test_call_with_retry_gives_up_after_max():
    """Persistent timeouts exhaust max_attempts and then propagate."""
    attempts = 0

    async def fn():
        nonlocal attempts
        attempts += 1
        raise httpx.TimeoutException("slow")

    with pytest.raises(httpx.TimeoutException):
        await call_with_retry(fn, max_attempts=3, base_delay=0.0)
    assert attempts == 3
@pytest.mark.asyncio
async def test_call_with_retry_does_not_catch_unexpected():
    """Non-transport exceptions propagate immediately, without retries."""
    async def fn():
        raise ValueError("not transient")

    with pytest.raises(ValueError):
        await call_with_retry(fn, max_attempts=5, base_delay=0.0)
-260
View File
@@ -1,260 +0,0 @@
import math
from mcp_common.indicators import (
adx,
atr,
autocorrelation,
garch11_forecast,
half_life_mean_reversion,
hurst_exponent,
macd,
rolling_sharpe,
rsi,
sma,
var_cvar,
vol_cone,
)
def test_rsi_simple():
    """RSI on the classic textbook series lands in the known ballpark."""
    closes = [44, 44.34, 44.09, 44.15, 43.61, 44.33, 44.83, 45.10, 45.42, 45.84,
              46.08, 45.89, 46.03, 45.61, 46.28]
    value = rsi(closes, period=14)
    assert value is not None
    assert 65.0 < value < 75.0
def test_rsi_insufficient_data():
    """Fewer closes than the period → None."""
    assert rsi([1, 2, 3], period=14) is None
def test_sma_simple():
    """Full window averages cleanly; a short series yields None."""
    full_window = sma([1, 2, 3, 4, 5], period=5)
    assert full_window == 3.0
    assert sma([1, 2, 3], period=5) is None
def test_atr_simple():
    """Constant 1-point ranges on a steady uptrend → ATR just above 1."""
    highs = [float(h) for h in range(10, 25)]
    lows = [float(lo) for lo in range(9, 24)]
    closes = [lo + 0.5 for lo in lows]
    value = atr(highs, lows, closes, period=14)
    assert value is not None
    assert 0.9 < value <= 1.5
def test_macd_trend_up():
    """Monotonic uptrend → MACD above zero, histogram non-negative."""
    closes = [float(c) for c in range(1, 60)]
    out = macd(closes)
    for key in ("macd", "signal", "hist"):
        assert out[key] is not None
    assert out["macd"] > 0
    assert out["hist"] >= 0
def test_macd_insufficient_data():
    """Too few closes → every MACD field is None."""
    assert macd([1.0, 2.0, 3.0]) == {"macd": None, "signal": None, "hist": None}
def test_macd_trend_down():
    """Monotonic downtrend → negative MACD, non-positive histogram."""
    closes = [float(c) for c in range(60, 1, -1)]
    out = macd(closes)
    assert out["macd"] < 0
    assert out["hist"] <= 0
def test_adx_insufficient_data():
    """Ten bars are not enough for period=14 → all None."""
    out = adx([1.0] * 10, [0.5] * 10, [0.7] * 10, period=14)
    assert out == {"adx": None, "+di": None, "-di": None}
def test_adx_strong_uptrend():
    """Steady rise: +DI dominates -DI and ADX reads strong (>50)."""
    base = [float(i) for i in range(1, 40)]
    highs = [b + 1.0 for b in base]
    lows = list(base)
    closes = [b + 0.5 for b in base]
    out = adx(highs, lows, closes, period=14)
    assert out["adx"] is not None
    assert out["+di"] is not None and out["-di"] is not None
    assert out["+di"] > out["-di"]
    assert out["adx"] > 50.0
def test_adx_flat_market():
    """Zero directional movement → ADX near zero."""
    out = adx([10.0] * 40, [9.0] * 40, [9.5] * 40, period=14)
    assert out["adx"] is not None
    assert out["adx"] < 5.0
# ---------- vol_cone ----------
def _gbm_series(mu: float, sigma: float, n: int, seed: int = 42) -> list[float]:
"""Mock GBM closes: deterministic for tests."""
import random
r = random.Random(seed)
p = [100.0]
for _ in range(n):
z = r.gauss(0.0, 1.0)
p.append(p[-1] * math.exp(mu / 252 + sigma / math.sqrt(252) * z))
return p
def test_vol_cone_returns_percentiles_per_window():
    """Each window yields current vol plus ordered p10/p50/p90 percentiles."""
    closes = _gbm_series(mu=0.0, sigma=0.5, n=400)
    cone = vol_cone(closes, windows=[10, 30, 60])
    assert set(cone.keys()) == {10, 30, 60}
    for stats in cone.values():
        assert "current" in stats
        assert {"p10", "p50", "p90"} <= stats.keys()
        assert stats["p10"] <= stats["p50"] <= stats["p90"]
        # annualized — sensible range for sigma=0.5
        assert 0.1 < stats["p50"] < 1.5
def test_vol_cone_insufficient_data():
    """Two closes cannot fill any window → current is None everywhere."""
    cone = vol_cone([100.0, 101.0], windows=[10, 30])
    assert cone[10]["current"] is None
    assert cone[30]["current"] is None
# ---------- hurst_exponent ----------
def test_hurst_random_walk_near_half():
    """GBM is a random walk → H ≈ 0.5, with a wide band for R/S bias."""
    h = hurst_exponent(_gbm_series(mu=0.0, sigma=0.3, n=500, seed=7))
    assert h is not None
    # Wide bound: still distinguishes a random walk from strong trending (>0.85).
    assert 0.35 < h < 0.85
def test_hurst_persistent_trend():
    """A strong monotonic trend pushes the Hurst exponent well above 0.5."""
    closes = [100.0 + i * 0.5 + math.sin(i / 10) * 0.1 for i in range(400)]
    h = hurst_exponent(closes)
    assert h is not None
    assert h > 0.85
def test_hurst_insufficient_data():
    """Three points are far too few → None."""
    assert hurst_exponent([1.0, 2.0, 3.0]) is None
# ---------- half_life_mean_reversion ----------
def test_half_life_mean_reverting_series():
    """OU process with theta=0.1 → half-life ≈ ln(2)/0.1 ≈ 6.93."""
    import random
    rng = random.Random(123)
    theta, mu, sigma = 0.1, 100.0, 0.5
    series = [mu]
    for _ in range(500):
        series.append(series[-1] + theta * (mu - series[-1]) + sigma * rng.gauss(0, 1))
    hl = half_life_mean_reversion(series)
    assert hl is not None
    # broad tolerance — finite-sample noise
    assert 3.0 < hl < 20.0
def test_half_life_trending_returns_none():
    """A pure trend never mean-reverts → None (or effectively infinite)."""
    hl = half_life_mean_reversion([100.0 + i for i in range(200)])
    assert hl is None or hl > 1000
# ---------- garch11_forecast ----------
def test_garch11_forecast_returns_positive_sigma():
    """Fitted GARCH(1,1) on GBM returns → positive sigma, stationary params."""
    out = garch11_forecast(_gbm_series(mu=0.0, sigma=0.4, n=500, seed=11))
    assert out is not None
    assert out["sigma_next"] > 0
    assert 0 < out["alpha"] < 1
    assert 0 < out["beta"] < 1
    assert out["alpha"] + out["beta"] < 1.0  # stationarity constraint
def test_garch11_insufficient_data():
    """Two closes cannot fit GARCH → None."""
    assert garch11_forecast([100.0, 101.0]) is None
# ---------- autocorrelation ----------
def test_autocorrelation_white_noise_low():
    """White noise → every lag's autocorrelation ≈ 0 (within ±2/sqrt(N))."""
    import random
    rng = random.Random(1)
    returns = [rng.gauss(0, 0.01) for _ in range(500)]
    out = autocorrelation(returns, max_lag=5)
    assert len(out) == 5
    bound = 2.0 / math.sqrt(len(returns))
    # generous: double the usual confidence band
    assert all(abs(value) < bound * 2 for value in out.values())
def test_autocorrelation_lag1_strong_for_ar1():
    """AR(1) with phi=0.7 → lag-1 autocorr ≈ 0.7, decaying geometrically."""
    import random
    rng = random.Random(2)
    series = [0.0]
    for _ in range(500):
        series.append(0.7 * series[-1] + rng.gauss(0, 0.1))
    out = autocorrelation(series, max_lag=3)
    assert out[1] > 0.5
    assert out[2] > 0.2  # geometric decay
def test_autocorrelation_insufficient_data():
    """A single observation → empty mapping."""
    assert autocorrelation([1.0], max_lag=5) == {}
# ---------- rolling_sharpe ----------
def test_rolling_sharpe_positive_for_uptrend():
    """Steady gains → positive Sharpe; Sortino at least half of it."""
    closes = [100.0 * (1 + 0.001 * i) for i in range(252)]
    out = rolling_sharpe(closes, window=60)
    assert out is not None
    assert out["sharpe"] > 0
    # sortino can exceed sharpe when there is no downside
    assert out["sortino"] >= out["sharpe"] / 2
def test_rolling_sharpe_zero_volatility():
    """A flat price series has no variance → Sharpe 0.0 by convention."""
    out = rolling_sharpe([100.0] * 100, window=60)
    assert out is not None
    assert out["sharpe"] == 0.0
def test_rolling_sharpe_insufficient_data():
    """Fewer closes than the window → None."""
    assert rolling_sharpe([100.0, 101.0], window=60) is None
# ---------- var_cvar ----------
def test_var_cvar_basic():
    """VaR is a positive loss figure; CVaR at least as bad; 99% ≥ 95%."""
    import random
    rng = random.Random(3)
    returns = [rng.gauss(0.0005, 0.02) for _ in range(1000)]
    out = var_cvar(returns, confidences=[0.95, 0.99])
    assert {"var_95", "cvar_95", "var_99", "cvar_99"} <= out.keys()
    assert out["var_95"] > 0
    assert out["cvar_95"] >= out["var_95"]  # CVaR is worse than VaR
    assert out["var_99"] >= out["var_95"]
def test_var_cvar_insufficient_data():
    """A single return → empty mapping."""
    assert var_cvar([0.01], confidences=[0.95]) == {}
-77
View File
@@ -1,77 +0,0 @@
import json
import logging
from mcp_common.logging import (
SecretsFilter,
configure_root_logging,
get_json_logger,
)
def test_secrets_filter_masks_bearer():
    """Bearer tokens inside the message are replaced with a mask."""
    record = logging.LogRecord(
        name="t", level=logging.INFO, pathname="", lineno=0,
        msg="Got Bearer abcdef123456 from client",
        args=(), exc_info=None,
    )
    SecretsFilter().filter(record)
    assert "abcdef" not in record.msg
    assert "***" in record.msg
def test_secrets_filter_masks_api_key_json():
    """api_key values embedded in JSON-ish messages are masked too."""
    record = logging.LogRecord(
        name="t", level=logging.INFO, pathname="", lineno=0,
        msg='{"api_key": "sk-live-abc123xyz"}',
        args=(), exc_info=None,
    )
    SecretsFilter().filter(record)
    assert "sk-live-abc123xyz" not in record.msg
def test_json_logger_outputs_json(capsys):
    """The JSON logger emits one parseable JSON object incl. extra fields."""
    get_json_logger("test").info("hello", extra={"user_id": 42})
    captured = capsys.readouterr()
    # json logger writes to stderr by default
    last_line = (captured.err or captured.out).strip().splitlines()[-1]
    payload = json.loads(last_line)
    assert payload["message"] == "hello"
    assert payload["user_id"] == 42
def test_configure_root_json_format(monkeypatch, capsys):
    """LOG_FORMAT=json → root logger emits structured JSON lines."""
    monkeypatch.setenv("LOG_FORMAT", "json")
    monkeypatch.setenv("LOG_LEVEL", "INFO")
    configure_root_logging()
    logging.info("root json test")
    payload = json.loads(capsys.readouterr().err.strip().splitlines()[-1])
    assert payload["message"] == "root json test"
    assert payload["levelname"] == "INFO"
def test_configure_root_text_format(monkeypatch, capsys):
    """LOG_FORMAT=text → human-readable output that is NOT JSON-parseable."""
    monkeypatch.setenv("LOG_FORMAT", "text")
    configure_root_logging()
    logging.info("root text test")
    last_line = capsys.readouterr().err.strip().splitlines()[-1]
    try:
        json.loads(last_line)
    except json.JSONDecodeError:
        pass  # expected: text format
    else:
        raise AssertionError("expected text format, got JSON")
    assert "root text test" in last_line
def test_configure_root_applies_secrets_filter(monkeypatch, capsys):
    """Root JSON logging also masks bearer tokens via SecretsFilter."""
    monkeypatch.setenv("LOG_FORMAT", "json")
    configure_root_logging()
    logging.info("calling with Bearer sk-live-leak123456 token")
    payload = json.loads(capsys.readouterr().err.strip().splitlines()[-1])
    assert "sk-live-leak123456" not in payload["message"]
    assert "***" in payload["message"]
-112
View File
@@ -1,112 +0,0 @@
from __future__ import annotations
from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.mcp_bridge import _derive_input_schemas, mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel
# Request body for the /tools/echo route; field order defines the JSON schema.
class EchoBody(BaseModel):
    # required string payload
    msg: str
    # optional repeat count, defaults to 1
    n: int = 1
def _make_app() -> tuple[FastAPI, TokenStore]:
    """App with /tools/echo (Pydantic body) and /tools/ping (no body)."""
    store = TokenStore(tokens={"t": Principal("obs", {"observer"})})
    application = build_app(name="t", version="v", token_store=store)

    @application.post("/tools/echo")
    def echo(body: EchoBody, principal: Principal = Depends(require_principal)):
        return {"echo": body.msg, "n": body.n}

    @application.post("/tools/ping")
    def ping(principal: Principal = Depends(require_principal)):
        return {"pong": True}

    return application, store
def test_derive_input_schemas_resolves_lazy_annotations():
    """Pydantic-bodied routes yield object schemas; body-less routes are absent."""
    application, _ = _make_app()
    schemas = _derive_input_schemas(application, ["echo", "ping"])
    assert "echo" in schemas
    echo_schema = schemas["echo"]
    assert echo_schema["type"] == "object"
    assert {"msg", "n"} <= echo_schema["properties"].keys()
    assert "msg" in echo_schema["required"]
    # ping has no Pydantic body → absent (the caller applies the fallback)
    assert "ping" not in schemas
def test_mount_mcp_endpoint_exposes_derived_schemas():
    """tools/list returns derived schemas; ping falls back to the generic one."""
    application, store = _make_app()
    mount_mcp_endpoint(
        application,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[
            {"name": "echo", "description": "Echo a message."},
            {"name": "ping", "description": "Ping."},
        ],
    )
    response = TestClient(application).post(
        "/mcp",
        headers={"Authorization": "Bearer t"},
        json={"jsonrpc": "2.0", "id": 1, "method": "tools/list"},
    )
    assert response.status_code == 200
    by_name = {tool["name"]: tool for tool in response.json()["result"]["tools"]}
    assert set(by_name["echo"]["inputSchema"]["required"]) == {"msg"}
    # ping falls back to the permissive generic schema
    assert by_name["ping"]["inputSchema"] == {
        "type": "object",
        "additionalProperties": True,
    }
def test_mount_mcp_endpoint_requires_auth():
    """/mcp rejects a missing token with 401 and an unknown token with 403."""
    application, store = _make_app()
    mount_mcp_endpoint(
        application,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[{"name": "echo"}],
    )
    client = TestClient(application)
    body = {"jsonrpc": "2.0", "id": 1, "method": "tools/list"}
    assert client.post("/mcp", json=body).status_code == 401
    rejected = client.post(
        "/mcp", headers={"Authorization": "Bearer WRONG"}, json=body,
    )
    assert rejected.status_code == 403
def test_explicit_input_schema_overrides_derived():
    """An input_schema given in the tool spec wins over the derived one."""
    application, store = _make_app()
    custom = {"type": "object", "properties": {"custom": {"type": "string"}}, "required": ["custom"]}
    mount_mcp_endpoint(
        application,
        name="test",
        version="1.0",
        token_store=store,
        internal_base_url="http://localhost:0",
        tools=[{"name": "echo", "input_schema": custom}],
    )
    response = TestClient(application).post(
        "/mcp",
        headers={"Authorization": "Bearer t"},
        json={"jsonrpc": "2.0", "id": 1, "method": "tools/list"},
    )
    assert response.json()["result"]["tools"][0]["inputSchema"] == custom
@@ -1,59 +0,0 @@
from __future__ import annotations
from mcp_common.microstructure import orderbook_imbalance
def test_orderbook_imbalance_balanced():
    """Equal volume on both sides -> ratio near zero, microprice defined."""
    book_bids = [[100.0, 1.0], [99.5, 1.0], [99.0, 1.0]]
    book_asks = [[100.5, 1.0], [101.0, 1.0], [101.5, 1.0]]
    result = orderbook_imbalance(book_bids, book_asks, depth=3)
    assert abs(result["imbalance_ratio"]) < 0.01  # balanced book
    assert result["bid_volume"] == 3.0
    assert result["ask_volume"] == 3.0
    assert result["microprice"] is not None
def test_orderbook_imbalance_bid_heavy():
    """Five times more bid volume -> strongly positive imbalance ratio."""
    result = orderbook_imbalance(
        [[100.0, 5.0], [99.5, 5.0]],
        [[100.5, 1.0], [101.0, 1.0]],
        depth=2,
    )
    assert result["imbalance_ratio"] > 0.5  # heavy bid pressure
    assert result["bid_volume"] == 10.0
    assert result["ask_volume"] == 2.0
def test_orderbook_imbalance_ask_heavy():
    """Five times more ask volume -> strongly negative imbalance ratio."""
    result = orderbook_imbalance(
        [[100.0, 1.0], [99.5, 1.0]],
        [[100.5, 5.0], [101.0, 5.0]],
        depth=2,
    )
    assert result["imbalance_ratio"] < -0.5
def test_orderbook_imbalance_microprice_skew():
    """Microprice is the size-weighted mid, weighted by the *opposite* side's
    depth: a large resting bid pushes the microprice toward the ask."""
    result = orderbook_imbalance([[100.0, 9.0]], [[101.0, 1.0]], depth=1)
    assert result["microprice"] > 100.5
def test_orderbook_imbalance_empty():
    """An empty book yields neither a ratio nor a microprice."""
    result = orderbook_imbalance([], [], depth=5)
    assert result["imbalance_ratio"] is None
    assert result["microprice"] is None
def test_orderbook_imbalance_one_sided():
    """A bid-only book saturates the ratio at +1."""
    result = orderbook_imbalance([[100.0, 1.0]], [], depth=1)
    assert result["imbalance_ratio"] == 1.0
def test_orderbook_imbalance_slope():
    """Slope captures how fast liquidity decays with depth: a steep book
    (volume collapsing away from the touch) yields a large slope value."""
    steep_bids = [[100.0, 10.0], [99.0, 1.0]]  # depth drops by 9 over 1 price unit
    steep_asks = [[101.0, 10.0], [102.0, 1.0]]
    result = orderbook_imbalance(steep_bids, steep_asks, depth=2)
    assert result["bid_slope"] is not None
    assert result["bid_slope"] > 5.0  # expected around 9
-144
View File
@@ -1,144 +0,0 @@
"""Test puri per mcp_common.options (logiche option-flow indipendenti
dall'exchange).
"""
from __future__ import annotations
import pytest
from mcp_common.options import (
atm_vs_wings_vol,
dealer_gamma_profile,
oi_weighted_skew,
smile_asymmetry,
vanna_charm_aggregate,
)
# ---------- oi_weighted_skew ----------
def test_oi_weighted_skew_balanced():
    """OI split 50/50 between calls and puts at equal IV -> skew near zero."""
    chain = [
        {"iv": 0.5, "delta": 0.5, "oi": 100, "option_type": "call"},
        {"iv": 0.5, "delta": -0.5, "oi": 100, "option_type": "put"},
    ]
    result = oi_weighted_skew(chain)
    assert abs(result["skew"]) < 0.01
def test_oi_weighted_skew_put_heavy():
    """Heavy put OI at higher IV -> put-weighted IV exceeds call-weighted IV,
    so skew is positive (put minus call)."""
    chain = [
        {"iv": 0.4, "delta": 0.5, "oi": 50, "option_type": "call"},
        {"iv": 0.7, "delta": -0.5, "oi": 500, "option_type": "put"},
    ]
    result = oi_weighted_skew(chain)
    assert result["skew"] > 0
    assert result["call_iv_weighted"] > 0
    assert result["put_iv_weighted"] > result["call_iv_weighted"]
def test_oi_weighted_skew_empty():
    """No legs -> all-None fields and zero total OI."""
    expected = {
        "skew": None,
        "call_iv_weighted": None,
        "put_iv_weighted": None,
        "total_oi": 0,
    }
    assert oi_weighted_skew([]) == expected
# ---------- smile_asymmetry ----------
def test_smile_asymmetry_symmetric():
    """A smile symmetric around ATM -> asymmetry close to zero."""
    chain = [
        {"strike": 80, "iv": 0.55, "option_type": "put"},
        {"strike": 90, "iv": 0.50, "option_type": "put"},
        {"strike": 100, "iv": 0.45, "option_type": "call"},
        {"strike": 110, "iv": 0.50, "option_type": "call"},
        {"strike": 120, "iv": 0.55, "option_type": "call"},
    ]
    result = smile_asymmetry(chain, spot=100.0)
    assert result["atm_iv"] is not None
    assert abs(result["asymmetry"]) < 0.05
def test_smile_asymmetry_put_skew():
    """OTM put IV (low strikes) well above OTM call IV -> asymmetry > 0."""
    chain = [
        {"strike": 80, "iv": 0.80, "option_type": "put"},
        {"strike": 100, "iv": 0.50, "option_type": "call"},
        {"strike": 120, "iv": 0.45, "option_type": "call"},
    ]
    assert smile_asymmetry(chain, spot=100.0)["asymmetry"] > 0.1
def test_smile_asymmetry_no_atm():
    """No strike anywhere near spot -> ATM IV cannot be determined."""
    chain = [{"strike": 200, "iv": 0.5, "option_type": "call"}]
    assert smile_asymmetry(chain, spot=100.0)["atm_iv"] is None
# ---------- atm_vs_wings_vol ----------
def test_atm_vs_wings_vol_basic():
    """ATM IV below both 25-delta wings -> positive wing richness."""
    chain = [
        {"strike": 90, "iv": 0.55, "delta": -0.25, "option_type": "put"},
        {"strike": 100, "iv": 0.45, "delta": 0.5, "option_type": "call"},
        {"strike": 110, "iv": 0.50, "delta": 0.25, "option_type": "call"},
    ]
    result = atm_vs_wings_vol(chain, spot=100.0)
    assert result["atm_iv"] == pytest.approx(0.45, rel=1e-3)
    assert result["wing_25d_call_iv"] == pytest.approx(0.50, rel=1e-3)
    assert result["wing_25d_put_iv"] == pytest.approx(0.55, rel=1e-3)
    assert result["wing_richness"] > 0  # wings richer than ATM
def test_atm_vs_wings_vol_no_data():
    """An empty chain yields no ATM IV."""
    assert atm_vs_wings_vol([], spot=100.0)["atm_iv"] is None
# ---------- dealer_gamma_profile ----------
def test_dealer_gamma_profile_assumes_dealer_short_calls():
    """Convention: dealers are short calls (sold to retail) and long puts, so
    call OI maps to negative dealer gamma and put OI to positive."""
    chain = [
        {"strike": 100, "gamma": 0.01, "oi": 1000, "option_type": "call"},
        {"strike": 100, "gamma": 0.01, "oi": 500, "option_type": "put"},
    ]
    result = dealer_gamma_profile(chain, spot=100.0)
    assert len(result["by_strike"]) == 1
    strike_row = result["by_strike"][0]
    assert strike_row["call_dealer_gamma"] < 0
    assert strike_row["put_dealer_gamma"] > 0
    # Call OI dominates at this strike -> net short dealer gamma.
    assert strike_row["net_dealer_gamma"] < 0
    assert result["total_net_dealer_gamma"] < 0
def test_dealer_gamma_profile_empty():
    """No legs -> empty per-strike profile and zero aggregate gamma."""
    result = dealer_gamma_profile([], spot=100.0)
    assert result["by_strike"] == []
    assert result["total_net_dealer_gamma"] == 0.0
# ---------- vanna_charm_aggregate ----------
def test_vanna_charm_aggregate_basic():
    """Opposite-signed call/put exposures with unequal OI -> nonzero net vanna."""
    chain = [
        {"strike": 100, "vanna": 0.05, "charm": -0.001, "oi": 1000, "option_type": "call"},
        {"strike": 100, "vanna": -0.05, "charm": 0.001, "oi": 500, "option_type": "put"},
    ]
    result = vanna_charm_aggregate(chain, spot=100.0)
    assert result["total_vanna"] != 0  # unequal OI leaves net exposure
    assert "total_charm" in result
    assert result["legs_analyzed"] == 2
def test_vanna_charm_aggregate_skip_missing_greeks():
    """A leg missing either greek (None vanna or charm) is skipped entirely."""
    chain = [
        {"strike": 100, "vanna": None, "charm": -0.001, "oi": 1000, "option_type": "call"},
        {"strike": 100, "vanna": 0.05, "charm": None, "oi": 500, "option_type": "put"},
    ]
    assert vanna_charm_aggregate(chain, spot=100.0)["legs_analyzed"] == 0
-90
View File
@@ -1,90 +0,0 @@
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_common.server import build_app
def test_build_app_health():
    """/health reports status, identity, uptime, data timestamp and timing header."""
    app = build_app(name="test-mcp", version="0.0.1", token_store=TokenStore(tokens={}))
    response = TestClient(app).get("/health")
    assert response.status_code == 200
    payload = response.json()
    assert payload["status"] == "healthy"
    assert payload["name"] == "test-mcp"
    assert payload["version"] == "0.0.1"
    assert "uptime_seconds" in payload
    assert "data_timestamp" in payload
    assert response.headers.get("X-Duration-Ms") is not None
def test_build_app_adds_token_store():
    """The provided token store must be exposed on app.state, same object."""
    token_store = TokenStore(tokens={"t1": Principal("x", {"core"})})
    app = build_app(name="t", version="v", token_store=token_store)
    assert app.state.token_store is token_store
def test_timestamp_injector_dict_response():
    """CER-P5-001: dict responses gain data_timestamp and X-Data-Timestamp."""
    app = build_app(name="t", version="v", token_store=TokenStore(tokens={}))

    @app.post("/tools/foo")
    def foo():
        return {"ok": True}

    response = TestClient(app).post("/tools/foo")
    assert response.status_code == 200
    payload = response.json()
    assert payload["ok"] is True
    assert "data_timestamp" in payload
    assert response.headers.get("X-Data-Timestamp") is not None
def test_timestamp_injector_list_of_dicts():
    """CER-P5-001: each dict in a list response gets its own data_timestamp."""
    app = build_app(name="t", version="v", token_store=TokenStore(tokens={}))

    @app.post("/tools/list_items")
    def list_items():
        return [{"x": 1}, {"x": 2}]

    response = TestClient(app).post("/tools/list_items")
    payload = response.json()
    assert isinstance(payload, list)
    assert len(payload) == 2
    assert all("data_timestamp" in item for item in payload)
    assert response.headers.get("X-Data-Timestamp") is not None
def test_timestamp_injector_preserves_existing():
    """CER-P5-001: a data_timestamp set by the handler is never overwritten."""
    app = build_app(name="t", version="v", token_store=TokenStore(tokens={}))

    @app.post("/tools/already")
    def already():
        return {"data_timestamp": "2020-01-01T00:00:00Z", "x": 1}

    payload = TestClient(app).post("/tools/already").json()
    assert payload["data_timestamp"] == "2020-01-01T00:00:00Z"
def test_timestamp_injector_empty_list_gets_header_only():
    """CER-P5-001: an empty list body stays untouched, but the header is set."""
    app = build_app(name="t", version="v", token_store=TokenStore(tokens={}))

    @app.post("/tools/empty_list")
    def empty_list():
        return []

    response = TestClient(app).post("/tools/empty_list")
    assert response.json() == []
    assert response.headers.get("X-Data-Timestamp") is not None
-51
View File
@@ -1,51 +0,0 @@
from __future__ import annotations
import random
from mcp_common.stats import cointegration_test
def test_cointegrated_synthetic_pair():
    """B is a random walk and A = 2*B + stationary noise -> cointegrated pair."""
    rng = random.Random(1)
    series_b = [100.0]
    for _ in range(300):
        series_b.append(series_b[-1] + rng.gauss(0, 1))
    series_a = [2 * value + rng.gauss(0, 0.5) for value in series_b]
    result = cointegration_test(series_a, series_b)
    assert result["cointegrated"] is True
    assert result["beta"] == pytest_approx(2.0, rel=0.05)
    assert result["adf_t_stat"] is not None
    assert result["adf_t_stat"] < -2.86  # below the 5% ADF critical value
def test_not_cointegrated_independent_walks():
    """Two independent random walks -> non-stationary spread -> no cointegration."""
    rng = random.Random(2)
    walk_a = [100.0]
    walk_b = [100.0]
    for _ in range(300):
        walk_a.append(walk_a[-1] + rng.gauss(0, 1))
        walk_b.append(walk_b[-1] + rng.gauss(0, 1))
    result = cointegration_test(walk_a, walk_b)
    # For independent walks the ADF t-stat is usually > -2.86 -> not cointegrated.
    assert result["cointegrated"] is False or result["adf_t_stat"] > -3.0
def test_cointegration_short_series():
    """Series too short to test -> all-None result."""
    result = cointegration_test([1.0, 2.0], [3.0, 4.0])
    assert result["cointegrated"] is None
    assert result["beta"] is None
def test_cointegration_mismatched_length():
    """Length mismatch -> None rather than implicit truncation."""
    assert cointegration_test([1.0, 2.0, 3.0], [1.0, 2.0])["cointegrated"] is None
def pytest_approx(value, rel):
    """Tiny stand-in for ``pytest.approx`` so this module stays pytest-free.

    Returns an object that compares equal to any number within ``rel``
    relative tolerance of ``value``.

    Args:
        value: Expected reference value.
        rel: Relative tolerance (e.g. 0.05 for +/-5%).

    Fixes over the previous version: comparing against a non-numeric value
    (e.g. ``None``) now compares unequal instead of raising ``TypeError``
    inside the assertion, and ``__repr__`` makes failure messages readable.
    """

    class _Approx:
        def __eq__(self, other):
            # Let Python fall back to identity (False) for non-numbers
            # instead of blowing up on the subtraction below.
            if not isinstance(other, (int, float)):
                return NotImplemented
            return abs(other - value) <= abs(value) * rel

        def __repr__(self):
            return f"approx({value!r}, rel={rel!r})"

    return _Approx()
-29
View File
@@ -1,29 +0,0 @@
# Packaging metadata for the mcp-alpaca service (uv workspace member).
[project]
name = "mcp-alpaca"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
    # Shared internal library, resolved from the workspace (see [tool.uv.sources]).
    "mcp-common",
    "fastapi>=0.115",
    "uvicorn[standard]>=0.30",
    "httpx>=0.27",
    "pydantic>=2.6",
    # Official Alpaca SDK (trading + historical data clients).
    "alpaca-py>=0.32",
    "pytz>=2024.1",
]
[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_alpaca"]
[tool.uv.sources]
# mcp-common comes from this repository's uv workspace, not PyPI.
mcp-common = { workspace = true }
[project.scripts]
# Console entrypoint used as the container CMD.
mcp-alpaca = "mcp_alpaca.__main__:main"
@@ -1,30 +0,0 @@
from __future__ import annotations
from mcp_common.app_factory import ExchangeAppSpec, run_exchange_main
from mcp_alpaca.client import AlpacaClient
from mcp_alpaca.server import create_app
# Declarative wiring consumed by the shared exchange entrypoint: credential
# file env var, paper/live switch, default endpoints/port, and the factories
# used to build the client and the FastAPI app.
SPEC = ExchangeAppSpec(
    exchange="alpaca",
    creds_env_var="ALPACA_CREDENTIALS_FILE",
    env_var="ALPACA_PAPER",
    flag_key="paper",
    default_base_url_live="https://api.alpaca.markets",
    default_base_url_testnet="https://paper-api.alpaca.markets",
    default_port=9020,
    # Alpaca's SDK selects paper trading via a boolean flag, so the resolved
    # environment is translated into `paper=` here.
    build_client=lambda creds, env_info: AlpacaClient(
        api_key=creds["api_key_id"],
        secret_key=creds["secret_key"],
        paper=(env_info.environment == "testnet"),
    ),
    build_app=create_app,
)
def main():
    """Console entrypoint: boot the Alpaca MCP service from SPEC."""
    run_exchange_main(SPEC)
if __name__ == "__main__":
    main()
@@ -1,385 +0,0 @@
from __future__ import annotations
import asyncio
import datetime as _dt
from typing import Any
from alpaca.data.historical import (
CryptoHistoricalDataClient,
OptionHistoricalDataClient,
StockHistoricalDataClient,
)
from alpaca.data.requests import (
CryptoBarsRequest,
CryptoLatestQuoteRequest,
CryptoLatestTradeRequest,
OptionBarsRequest,
OptionChainRequest,
OptionLatestQuoteRequest,
StockBarsRequest,
StockLatestQuoteRequest,
StockLatestTradeRequest,
StockSnapshotRequest,
)
from alpaca.data.timeframe import TimeFrame, TimeFrameUnit
from alpaca.trading.client import TradingClient
from alpaca.trading.enums import (
AssetClass,
OrderSide,
QueryOrderStatus,
TimeInForce,
)
from alpaca.trading.requests import (
ClosePositionRequest,
GetAssetsRequest,
GetOrdersRequest,
LimitOrderRequest,
MarketOrderRequest,
ReplaceOrderRequest,
StopOrderRequest,
)
# Supported candle-interval strings mapped to Alpaca TimeFrame objects.
_TF_MAP = {
    "1min": TimeFrame(1, TimeFrameUnit.Minute),
    "5min": TimeFrame(5, TimeFrameUnit.Minute),
    "15min": TimeFrame(15, TimeFrameUnit.Minute),
    "30min": TimeFrame(30, TimeFrameUnit.Minute),
    "1h": TimeFrame(1, TimeFrameUnit.Hour),
    "1d": TimeFrame(1, TimeFrameUnit.Day),
    "1w": TimeFrame(1, TimeFrameUnit.Week),
}
# Asset-class names accepted by this service.
# NOTE(review): not referenced by the code visible in this module — possibly
# imported elsewhere; confirm before removing.
_ASSET_CLASSES = {"stocks", "crypto", "options"}
def _tf(interval: str) -> TimeFrame:
    """Resolve a candle-interval string to its Alpaca TimeFrame.

    Raises:
        ValueError: if the interval is not one of the supported keys.
    """
    timeframe = _TF_MAP.get(interval)
    if timeframe is None:
        raise ValueError(f"unsupported timeframe: {interval}")
    return timeframe
def _asset_class_enum(ac: str) -> AssetClass:
    """Map a public asset-class name (case-insensitive) to the Alpaca enum.

    Raises:
        ValueError: for any name outside stocks/crypto/options.
    """
    normalized = ac.lower()
    mapping = {
        "stocks": AssetClass.US_EQUITY,
        "crypto": AssetClass.CRYPTO,
        "options": AssetClass.US_OPTION,
    }
    if normalized not in mapping:
        raise ValueError(f"invalid asset_class: {normalized}")
    return mapping[normalized]
def _serialize(obj: Any) -> Any:
    """Recursively convert pydantic/datetime objects → json-safe.

    Check order is load-bearing: scalars pass through unchanged, datetimes
    become ISO-8601 strings, containers recurse, pydantic models are dumped
    via ``model_dump`` *before* the generic ``__dict__`` fallback, and
    anything else is stringified.
    """
    if obj is None or isinstance(obj, str | int | float | bool):
        return obj
    if isinstance(obj, _dt.datetime | _dt.date):
        return obj.isoformat()
    if isinstance(obj, dict):
        return {k: _serialize(v) for k, v in obj.items()}
    if isinstance(obj, list | tuple):
        return [_serialize(v) for v in obj]
    if hasattr(obj, "model_dump"):
        return _serialize(obj.model_dump())
    if hasattr(obj, "__dict__"):
        # Plain objects: serialize their attribute dict.
        return _serialize(vars(obj))
    # Last resort (e.g. enums, sets): lossy string conversion.
    return str(obj)
class AlpacaClient:
    """Async facade over the alpaca-py SDK.

    Holds one TradingClient plus stock/crypto/option historical-data clients,
    runs every blocking SDK call on a worker thread (``asyncio.to_thread``),
    and converts all results to JSON-safe structures via ``_serialize``.
    """
    def __init__(
        self,
        api_key: str,
        secret_key: str,
        paper: bool = True,
        trading: Any | None = None,
        stock_data: Any | None = None,
        crypto_data: Any | None = None,
        option_data: Any | None = None,
    ) -> None:
        """Build the SDK clients; the keyword overrides allow injecting fakes in tests."""
        self.api_key = api_key
        self.secret_key = secret_key
        self.paper = paper
        self._trading = trading or TradingClient(
            api_key=api_key, secret_key=secret_key, paper=paper
        )
        self._stock = stock_data or StockHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )
        self._crypto = crypto_data or CryptoHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )
        self._option = option_data or OptionHistoricalDataClient(
            api_key=api_key, secret_key=secret_key
        )
    async def _run(self, fn, /, *args, **kwargs):
        """Execute a blocking SDK callable on a worker thread."""
        return await asyncio.to_thread(fn, *args, **kwargs)
    # ── Account / positions ──────────────────────────────────────
    async def get_account(self) -> dict:
        """Return the account summary as a JSON-safe dict."""
        acc = await self._run(self._trading.get_account)
        return _serialize(acc)
    async def get_positions(self) -> list[dict]:
        """Return all open positions."""
        pos = await self._run(self._trading.get_all_positions)
        return [_serialize(p) for p in pos]
    async def get_activities(self, limit: int = 50) -> list[dict]:
        """Return account activities, truncated to ``limit``.

        NOTE(review): the SDK call is unfiltered — all activities are fetched
        and then sliced client-side.
        """
        acts = await self._run(self._trading.get_account_activities)
        data = [_serialize(a) for a in acts]
        return data[:limit]
    # ── Assets ──────────────────────────────────────────────────
    async def get_assets(
        self, asset_class: str = "stocks", status: str = "active"
    ) -> list[dict]:
        """List assets for an asset class; the response is capped at 500 entries."""
        req = GetAssetsRequest(
            asset_class=_asset_class_enum(asset_class),
            status=status,
        )
        assets = await self._run(self._trading.get_all_assets, req)
        return [_serialize(a) for a in assets[:500]]
    # ── Market data ─────────────────────────────────────────────
    async def get_ticker(self, symbol: str, asset_class: str = "stocks") -> dict:
        """Latest trade/quote for one symbol.

        Stocks return trade + quote (with sizes), crypto trade + quote, and
        options quote only. Raises ValueError for an unknown asset_class.
        """
        ac = asset_class.lower()
        if ac == "stocks":
            req = StockLatestTradeRequest(symbol_or_symbols=symbol)
            data = await self._run(self._stock.get_stock_latest_trade, req)
            trade = data.get(symbol)
            q_req = StockLatestQuoteRequest(symbol_or_symbols=symbol)
            qdata = await self._run(self._stock.get_stock_latest_quote, q_req)
            quote = qdata.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "stocks",
                "last_price": getattr(trade, "price", None),
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "bid_size": getattr(quote, "bid_size", None),
                "ask_size": getattr(quote, "ask_size", None),
                "timestamp": _serialize(getattr(trade, "timestamp", None)),
            }
        if ac == "crypto":
            req = CryptoLatestTradeRequest(symbol_or_symbols=symbol)
            data = await self._run(self._crypto.get_crypto_latest_trade, req)
            trade = data.get(symbol)
            q_req = CryptoLatestQuoteRequest(symbol_or_symbols=symbol)
            qdata = await self._run(self._crypto.get_crypto_latest_quote, q_req)
            quote = qdata.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "crypto",
                "last_price": getattr(trade, "price", None),
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "timestamp": _serialize(getattr(trade, "timestamp", None)),
            }
        if ac == "options":
            # Options expose only a latest quote (no last-trade endpoint used here).
            req = OptionLatestQuoteRequest(symbol_or_symbols=symbol)
            data = await self._run(self._option.get_option_latest_quote, req)
            quote = data.get(symbol)
            return {
                "symbol": symbol,
                "asset_class": "options",
                "bid": getattr(quote, "bid_price", None),
                "ask": getattr(quote, "ask_price", None),
                "timestamp": _serialize(getattr(quote, "timestamp", None)),
            }
        raise ValueError(f"invalid asset_class: {asset_class}")
    async def get_bars(
        self,
        symbol: str,
        asset_class: str = "stocks",
        interval: str = "1d",
        start: str | None = None,
        end: str | None = None,
        limit: int = 1000,
    ) -> dict:
        """OHLCV candles; defaults to the trailing 30 days when start is omitted.

        NOTE(review): ``fromisoformat`` yields a naive datetime unless the
        string carries an offset, while the defaults are UTC-aware — confirm
        callers pass offset-aware ISO strings.
        """
        tf = _tf(interval)
        start_dt = _dt.datetime.fromisoformat(start) if start else (
            _dt.datetime.now(_dt.UTC) - _dt.timedelta(days=30)
        )
        end_dt = _dt.datetime.fromisoformat(end) if end else _dt.datetime.now(_dt.UTC)
        ac = asset_class.lower()
        if ac == "stocks":
            req = StockBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._stock.get_stock_bars, req)
        elif ac == "crypto":
            req = CryptoBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._crypto.get_crypto_bars, req)
        elif ac == "options":
            req = OptionBarsRequest(
                symbol_or_symbols=symbol, timeframe=tf,
                start=start_dt, end=end_dt, limit=limit,
            )
            data = await self._run(self._option.get_option_bars, req)
        else:
            raise ValueError(f"invalid asset_class: {asset_class}")
        # The SDK returns a BarSet-like object; flatten the per-symbol rows.
        bars_dict = getattr(data, "data", {}) or {}
        rows = bars_dict.get(symbol, []) or []
        bars = [
            {
                "timestamp": _serialize(getattr(b, "timestamp", None)),
                "open": getattr(b, "open", None),
                "high": getattr(b, "high", None),
                "low": getattr(b, "low", None),
                "close": getattr(b, "close", None),
                "volume": getattr(b, "volume", None),
            }
            for b in rows
        ]
        return {"symbol": symbol, "asset_class": ac, "interval": interval, "bars": bars}
    async def get_snapshot(self, symbol: str) -> dict:
        """Stock snapshot for one symbol (stocks data client only)."""
        req = StockSnapshotRequest(symbol_or_symbols=symbol)
        data = await self._run(self._stock.get_stock_snapshot, req)
        return _serialize(data.get(symbol))
    async def get_option_chain(
        self,
        underlying: str,
        expiry: str | None = None,
    ) -> dict:
        """Option chain for an underlying, optionally filtered by expiry (YYYY-MM-DD)."""
        kwargs: dict[str, Any] = {"underlying_symbol": underlying}
        if expiry:
            kwargs["expiration_date"] = _dt.date.fromisoformat(expiry)
        req = OptionChainRequest(**kwargs)
        data = await self._run(self._option.get_option_chain, req)
        return {
            "underlying": underlying,
            "expiry": expiry,
            "contracts": _serialize(data),
        }
    # ── Orders ──────────────────────────────────────────────────
    async def get_open_orders(self, limit: int = 50) -> list[dict]:
        """Currently open orders, up to ``limit``."""
        req = GetOrdersRequest(status=QueryOrderStatus.OPEN, limit=limit)
        orders = await self._run(self._trading.get_orders, filter=req)
        return [_serialize(o) for o in orders]
    async def place_order(
        self,
        symbol: str,
        side: str,
        qty: float | None = None,
        notional: float | None = None,
        order_type: str = "market",
        limit_price: float | None = None,
        stop_price: float | None = None,
        tif: str = "day",
        asset_class: str = "stocks",
    ) -> dict:
        """Submit a market/limit/stop order.

        Raises ValueError when a limit/stop price is missing for its order
        type, or for an unsupported order_type. Only the qty/notional values
        actually provided are forwarded to the SDK request.
        """
        # Anything other than "buy" (case-insensitive) is treated as a sell.
        side_enum = OrderSide.BUY if side.lower() == "buy" else OrderSide.SELL
        tif_enum = TimeInForce(tif.lower())
        ot = order_type.lower()
        common = {
            "symbol": symbol,
            "side": side_enum,
            "time_in_force": tif_enum,
        }
        if qty is not None:
            common["qty"] = qty
        if notional is not None:
            common["notional"] = notional
        if ot == "market":
            req = MarketOrderRequest(**common)
        elif ot == "limit":
            if limit_price is None:
                raise ValueError("limit_price required for limit order")
            req = LimitOrderRequest(**common, limit_price=limit_price)
        elif ot == "stop":
            if stop_price is None:
                raise ValueError("stop_price required for stop order")
            req = StopOrderRequest(**common, stop_price=stop_price)
        else:
            raise ValueError(f"unsupported order_type: {order_type}")
        order = await self._run(self._trading.submit_order, req)
        return _serialize(order)
    async def amend_order(
        self,
        order_id: str,
        qty: float | None = None,
        limit_price: float | None = None,
        stop_price: float | None = None,
        tif: str | None = None,
    ) -> dict:
        """Replace mutable fields of an existing order; only non-None fields are sent."""
        kwargs: dict[str, Any] = {}
        if qty is not None:
            kwargs["qty"] = qty
        if limit_price is not None:
            kwargs["limit_price"] = limit_price
        if stop_price is not None:
            kwargs["stop_price"] = stop_price
        if tif is not None:
            kwargs["time_in_force"] = TimeInForce(tif.lower())
        req = ReplaceOrderRequest(**kwargs)
        order = await self._run(self._trading.replace_order_by_id, order_id, req)
        return _serialize(order)
    async def cancel_order(self, order_id: str) -> dict:
        """Cancel one order by id; returns a simple confirmation dict."""
        await self._run(self._trading.cancel_order_by_id, order_id)
        return {"order_id": order_id, "canceled": True}
    async def cancel_all_orders(self) -> list[dict]:
        """Cancel every open order; returns the per-order SDK responses."""
        resp = await self._run(self._trading.cancel_orders)
        return [_serialize(r) for r in resp]
    # ── Position close ──────────────────────────────────────────
    async def close_position(
        self, symbol: str, qty: float | None = None, percentage: float | None = None
    ) -> dict:
        """Close a position fully, or partially via qty or percentage.

        qty/percentage are stringified because ClosePositionRequest carries
        them as strings.
        """
        req = None
        if qty is not None or percentage is not None:
            kwargs: dict[str, Any] = {}
            if qty is not None:
                kwargs["qty"] = str(qty)
            if percentage is not None:
                kwargs["percentage"] = str(percentage)
            req = ClosePositionRequest(**kwargs)
        order = await self._run(
            self._trading.close_position, symbol, close_options=req
        )
        return _serialize(order)
    async def close_all_positions(self, cancel_orders: bool = True) -> list[dict]:
        """Liquidate all positions, optionally cancelling open orders first."""
        resp = await self._run(
            self._trading.close_all_positions, cancel_orders=cancel_orders
        )
        return [_serialize(r) for r in resp]
    # ── Clock / calendar ────────────────────────────────────────
    async def get_clock(self) -> dict:
        """Return the exchange clock object, serialized."""
        clock = await self._run(self._trading.get_clock)
        return _serialize(clock)
    async def get_calendar(
        self, start: str | None = None, end: str | None = None
    ) -> list[dict]:
        """Trading calendar, optionally bounded by start/end (YYYY-MM-DD)."""
        # Imported lazily; not needed by the rest of the module.
        from alpaca.trading.requests import GetCalendarRequest
        kwargs: dict[str, Any] = {}
        if start:
            kwargs["start"] = _dt.date.fromisoformat(start)
        if end:
            kwargs["end"] = _dt.date.fromisoformat(end)
        req = GetCalendarRequest(**kwargs) if kwargs else None
        # With no bounds, call the SDK without a filter at all.
        cal = await self._run(
            self._trading.get_calendar, filters=req
        ) if req else await self._run(self._trading.get_calendar)
        return [_serialize(c) for c in cal]
@@ -1,56 +0,0 @@
"""Leverage cap server-side per place_order.
Cap letto dal secret JSON via campo `max_leverage`. Default 1 (cash) se assente.
"""
from __future__ import annotations
from fastapi import HTTPException
def get_max_leverage(creds: dict) -> int:
    """Read the `max_leverage` cap from the secret payload.

    Falls back to 1 (cash account) when the field is missing or not an
    integer-convertible value, and never returns less than 1.
    """
    try:
        cap = int(creds.get("max_leverage", 1))
    except (TypeError, ValueError):
        cap = 1
    return cap if cap >= 1 else 1
def enforce_leverage(
    requested: int | float | None,
    *,
    creds: dict,
    exchange: str,
) -> int:
    """Validate a requested leverage against the server-side cap.

    Returns the leverage to apply: the cap itself when nothing was requested,
    otherwise the (int-truncated) requested value.

    Raises:
        HTTPException: 403 LEVERAGE_CAP_EXCEEDED when the requested value is
            below 1 or above the cap.

    NOTE(review): fractional requests are truncated (2.9 -> 2) before the
    cap comparison — confirm that is the intended rounding.
    """
    cap = get_max_leverage(creds)
    if requested is None:
        # No explicit request -> apply the cap as the default.
        return cap
    lev = int(requested)
    detail = {
        "error": "LEVERAGE_CAP_EXCEEDED",
        "exchange": exchange,
        "requested": lev,
        "max": cap,
    }
    if lev < 1:
        detail["reason"] = "leverage must be >= 1"
        raise HTTPException(status_code=403, detail=detail)
    if lev > cap:
        raise HTTPException(status_code=403, detail=detail)
    return lev
@@ -1,321 +0,0 @@
from __future__ import annotations
import os
from fastapi import Depends, HTTPException
from mcp_common.audit import audit_write_op
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.environment import EnvironmentInfo
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel
from mcp_alpaca.client import AlpacaClient
from mcp_alpaca.leverage_cap import get_max_leverage
# --- Body models: reads ---
class AccountReq(BaseModel):
    """Empty body for get_account."""
    pass
class PositionsReq(BaseModel):
    """Empty body for get_positions."""
    pass
class ActivitiesReq(BaseModel):
    """Body for get_activities."""
    limit: int = 50
class AssetsReq(BaseModel):
    """Body for get_assets."""
    asset_class: str = "stocks"
    status: str = "active"
class TickerReq(BaseModel):
    """Body for get_ticker."""
    symbol: str
    asset_class: str = "stocks"
class BarsReq(BaseModel):
    """Body for get_bars (start/end are ISO strings)."""
    symbol: str
    asset_class: str = "stocks"
    interval: str = "1d"
    start: str | None = None
    end: str | None = None
    limit: int = 1000
class SnapshotReq(BaseModel):
    """Body for get_snapshot."""
    symbol: str
class OptionChainReq(BaseModel):
    """Body for get_option_chain (expiry: YYYY-MM-DD)."""
    underlying: str
    expiry: str | None = None
class OpenOrdersReq(BaseModel):
    """Body for get_open_orders."""
    limit: int = 50
class ClockReq(BaseModel):
    """Empty body for get_clock."""
    pass
class CalendarReq(BaseModel):
    """Body for get_calendar (dates: YYYY-MM-DD)."""
    start: str | None = None
    end: str | None = None
# --- Body models: writes ---
class PlaceOrderReq(BaseModel):
    """Body for place_order (qty or notional; prices per order_type)."""
    symbol: str
    side: str
    qty: float | None = None
    notional: float | None = None
    order_type: str = "market"
    limit_price: float | None = None
    stop_price: float | None = None
    tif: str = "day"
    asset_class: str = "stocks"
class AmendOrderReq(BaseModel):
    """Body for amend_order; only non-None fields are applied."""
    order_id: str
    qty: float | None = None
    limit_price: float | None = None
    stop_price: float | None = None
    tif: str | None = None
class CancelOrderReq(BaseModel):
    """Body for cancel_order."""
    order_id: str
class CancelAllReq(BaseModel):
    """Empty body for cancel_all_orders."""
    pass
class ClosePositionReq(BaseModel):
    """Body for close_position (full close when qty and percentage are None)."""
    symbol: str
    qty: float | None = None
    percentage: float | None = None
class CloseAllPositionsReq(BaseModel):
    """Body for close_all_positions."""
    cancel_orders: bool = True
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
allowed: set[str] = set()
if core:
allowed.add("core")
if observer:
allowed.add("observer")
if not (principal.capabilities & allowed):
raise HTTPException(status_code=403, detail="forbidden")
def create_app(
*,
client: AlpacaClient,
token_store: TokenStore,
creds: dict | None = None,
env_info: EnvironmentInfo | None = None,
):
creds = creds or {}
app = build_app(name="mcp-alpaca", version="0.1.0", token_store=token_store)
# ── Reads ──────────────────────────────────────────────
@app.post("/tools/environment_info", tags=["reads"])
async def t_environment_info(principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
if env_info is None:
return {
"exchange": "alpaca",
"environment": "testnet" if getattr(client, "paper", True) else "mainnet",
"source": "credentials",
"env_value": None,
"base_url": getattr(client, "base_url", None),
"max_leverage": get_max_leverage(creds),
}
return {
"exchange": env_info.exchange,
"environment": env_info.environment,
"source": env_info.source,
"env_value": env_info.env_value,
"base_url": env_info.base_url,
"max_leverage": get_max_leverage(creds),
}
@app.post("/tools/get_account", tags=["reads"])
async def t_get_account(body: AccountReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_account()
@app.post("/tools/get_positions", tags=["reads"])
async def t_get_positions(body: PositionsReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return {"positions": await client.get_positions()}
@app.post("/tools/get_activities", tags=["reads"])
async def t_get_activities(body: ActivitiesReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return {"activities": await client.get_activities(body.limit)}
@app.post("/tools/get_assets", tags=["reads"])
async def t_get_assets(body: AssetsReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return {"assets": await client.get_assets(body.asset_class, body.status)}
@app.post("/tools/get_ticker", tags=["reads"])
async def t_get_ticker(body: TickerReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_ticker(body.symbol, body.asset_class)
@app.post("/tools/get_bars", tags=["reads"])
async def t_get_bars(body: BarsReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_bars(
body.symbol, body.asset_class, body.interval, body.start, body.end, body.limit,
)
@app.post("/tools/get_snapshot", tags=["reads"])
async def t_get_snapshot(body: SnapshotReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_snapshot(body.symbol)
@app.post("/tools/get_option_chain", tags=["reads"])
async def t_get_option_chain(body: OptionChainReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_option_chain(body.underlying, body.expiry)
@app.post("/tools/get_open_orders", tags=["reads"])
async def t_get_open_orders(body: OpenOrdersReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return {"orders": await client.get_open_orders(body.limit)}
@app.post("/tools/get_clock", tags=["reads"])
async def t_get_clock(body: ClockReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return await client.get_clock()
@app.post("/tools/get_calendar", tags=["reads"])
async def t_get_calendar(body: CalendarReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return {"calendar": await client.get_calendar(body.start, body.end)}
# ── Writes ─────────────────────────────────────────────
@app.post("/tools/place_order", tags=["writes"])
async def t_place_order(body: PlaceOrderReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True)
result = await client.place_order(
body.symbol, body.side, body.qty, body.notional,
body.order_type, body.limit_price, body.stop_price, body.tif, body.asset_class,
)
audit_write_op(
principal=principal, action="place_order", exchange="alpaca",
target=body.symbol,
payload={"side": body.side, "qty": body.qty, "notional": body.notional,
"order_type": body.order_type, "limit_price": body.limit_price,
"stop_price": body.stop_price, "tif": body.tif,
"asset_class": body.asset_class},
result=result,
)
return result
@app.post("/tools/amend_order", tags=["writes"])
async def t_amend_order(body: AmendOrderReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True)
result = await client.amend_order(
body.order_id, body.qty, body.limit_price, body.stop_price, body.tif,
)
audit_write_op(
principal=principal, action="amend_order", exchange="alpaca",
target=body.order_id,
payload={"qty": body.qty, "limit_price": body.limit_price,
"stop_price": body.stop_price, "tif": body.tif},
result=result,
)
return result
@app.post("/tools/cancel_order", tags=["writes"])
async def t_cancel_order(body: CancelOrderReq, principal: Principal = Depends(require_principal)):
_check(principal, core=True)
result = await client.cancel_order(body.order_id)
audit_write_op(
principal=principal, action="cancel_order", exchange="alpaca",
target=body.order_id, payload={}, result=result,
)
return result
@app.post("/tools/cancel_all_orders", tags=["writes"])
async def t_cancel_all(body: CancelAllReq, principal: Principal = Depends(require_principal)):
    """Cancel every open order (core scope only); audit-logged."""
    _check(principal, core=True)
    result = {"canceled": await client.cancel_all_orders()}
    # No target: the operation applies account-wide.
    audit_write_op(
        principal=principal, action="cancel_all_orders", exchange="alpaca",
        payload={}, result=result,
    )
    return result
@app.post("/tools/close_position", tags=["writes"])
async def t_close(body: ClosePositionReq, principal: Principal = Depends(require_principal)):
    """Close a position, fully or partially (qty or percentage); audit-logged."""
    _check(principal, core=True)
    result = await client.close_position(body.symbol, body.qty, body.percentage)
    audit_write_op(
        principal=principal, action="close_position", exchange="alpaca",
        target=body.symbol,
        payload={"qty": body.qty, "percentage": body.percentage},
        result=result,
    )
    return result
@app.post("/tools/close_all_positions", tags=["writes"])
async def t_close_all(body: CloseAllPositionsReq, principal: Principal = Depends(require_principal)):
    """Liquidate the whole portfolio, optionally cancelling open orders first."""
    _check(principal, core=True)
    result = {"closed": await client.close_all_positions(body.cancel_orders)}
    audit_write_op(
        principal=principal, action="close_all_positions", exchange="alpaca",
        payload={"cancel_orders": body.cancel_orders}, result=result,
    )
    return result
# ── MCP mount ──────────────────────────────────────────
# Expose the REST tools above over the MCP protocol; the bridge proxies
# tool calls back into this same app via the internal base URL.
port = int(os.environ.get("PORT", "9020"))
mount_mcp_endpoint(
    app,
    name="cerbero-alpaca",
    version="0.1.0",
    token_store=token_store,
    internal_base_url=f"http://localhost:{port}",
    # Tool names must match the /tools/<name> routes registered above.
    tools=[
        {"name": "environment_info", "description": "Ambiente operativo (paper/live), source, base_url, max_leverage cap."},
        {"name": "get_account", "description": "Alpaca account summary (equity, cash, buying_power)."},
        {"name": "get_positions", "description": "Posizioni aperte (stocks/crypto/options)."},
        {"name": "get_activities", "description": "Activity log (fills, dividends, transfers)."},
        {"name": "get_assets", "description": "Universo asset per asset_class."},
        {"name": "get_ticker", "description": "Last trade + quote per simbolo (stocks/crypto/options)."},
        {"name": "get_bars", "description": "OHLCV candles (stocks/crypto/options)."},
        {"name": "get_snapshot", "description": "Snapshot completo stock (last trade+quote+bar)."},
        {"name": "get_option_chain", "description": "Option chain per underlying."},
        {"name": "get_open_orders", "description": "Ordini pending."},
        {"name": "get_clock", "description": "Market clock (open/close, next_open)."},
        {"name": "get_calendar", "description": "Calendar sessioni trading."},
        {"name": "place_order", "description": "Invia ordine (CORE only)."},
        {"name": "amend_order", "description": "Replace ordine esistente."},
        {"name": "cancel_order", "description": "Cancella ordine."},
        {"name": "cancel_all_orders", "description": "Cancella tutti ordini aperti."},
        {"name": "close_position", "description": "Chiude posizione (tutta o parziale)."},
        {"name": "close_all_positions", "description": "Liquida tutto il portafoglio."},
    ],
)
return app
-39
View File
@@ -1,39 +0,0 @@
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
from mcp_alpaca.client import AlpacaClient
@pytest.fixture
def mock_trading():
    """MagicMock stand-in for alpaca's TradingClient (no network)."""
    return MagicMock(name="alpaca_TradingClient")
@pytest.fixture
def mock_stock():
    """MagicMock stand-in for alpaca's StockHistoricalDataClient."""
    return MagicMock(name="alpaca_StockHistoricalDataClient")
@pytest.fixture
def mock_crypto():
    """MagicMock stand-in for alpaca's CryptoHistoricalDataClient."""
    return MagicMock(name="alpaca_CryptoHistoricalDataClient")
@pytest.fixture
def mock_option():
    """MagicMock stand-in for alpaca's OptionHistoricalDataClient."""
    return MagicMock(name="alpaca_OptionHistoricalDataClient")
@pytest.fixture
def client(mock_trading, mock_stock, mock_crypto, mock_option):
    """AlpacaClient in paper mode, wired with the mocked SDK clients above."""
    return AlpacaClient(
        api_key="test_key",
        secret_key="test_secret",
        paper=True,
        trading=mock_trading,
        stock_data=mock_stock,
        crypto_data=mock_crypto,
        option_data=mock_option,
    )
-80
View File
@@ -1,80 +0,0 @@
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
@pytest.mark.asyncio
async def test_init_paper_mode(client, mock_trading):
    """Constructor keeps the paper flag and the injected trading client."""
    assert client.paper is True
    assert client._trading is mock_trading
@pytest.mark.asyncio
async def test_get_account_calls_trading(client, mock_trading):
    """get_account delegates to the SDK once and returns its model_dump."""
    mock_trading.get_account.return_value = MagicMock(
        model_dump=lambda: {"equity": 100000, "cash": 50000}
    )
    result = await client.get_account()
    mock_trading.get_account.assert_called_once()
    assert result["equity"] == 100000
@pytest.mark.asyncio
async def test_get_positions_returns_list(client, mock_trading):
    """get_positions converts each SDK position into a plain dict."""
    pos_mock = MagicMock(model_dump=lambda: {"symbol": "AAPL", "qty": 10})
    mock_trading.get_all_positions.return_value = [pos_mock]
    result = await client.get_positions()
    assert len(result) == 1
    assert result[0]["symbol"] == "AAPL"
@pytest.mark.asyncio
async def test_place_market_order_stocks(client, mock_trading):
    """Market order for stocks reaches submit_order and returns the order dict."""
    order_mock = MagicMock(model_dump=lambda: {"id": "o123", "symbol": "AAPL"})
    mock_trading.submit_order.return_value = order_mock
    result = await client.place_order(
        symbol="AAPL", side="buy", qty=1, order_type="market", asset_class="stocks",
    )
    assert result["id"] == "o123"
    assert mock_trading.submit_order.called
@pytest.mark.asyncio
async def test_place_limit_order_requires_price(client):
    """A limit order without limit_price must raise ValueError."""
    with pytest.raises(ValueError, match="limit_price"):
        await client.place_order(
            symbol="AAPL", side="buy", qty=1, order_type="limit",
        )
@pytest.mark.asyncio
async def test_cancel_order(client, mock_trading):
    """cancel_order forwards the id and reports a canonical envelope."""
    mock_trading.cancel_order_by_id.return_value = None
    result = await client.cancel_order("o1")
    mock_trading.cancel_order_by_id.assert_called_once_with("o1")
    assert result == {"order_id": "o1", "canceled": True}
@pytest.mark.asyncio
async def test_close_position_no_options(client, mock_trading):
    """close_position without qty/percentage closes via the SDK."""
    order_mock = MagicMock(model_dump=lambda: {"id": "close-1"})
    mock_trading.close_position.return_value = order_mock
    result = await client.close_position("AAPL")
    assert mock_trading.close_position.called
    assert result["id"] == "close-1"
@pytest.mark.asyncio
async def test_get_clock(client, mock_trading):
    """get_clock surfaces the SDK clock as a plain dict."""
    clock_mock = MagicMock(model_dump=lambda: {"is_open": True, "next_close": "2026-04-21T20:00:00Z"})
    mock_trading.get_clock.return_value = clock_mock
    result = await client.get_clock()
    assert result["is_open"] is True
@pytest.mark.asyncio
async def test_invalid_asset_class(client):
    """Unknown asset_class values are rejected with ValueError."""
    with pytest.raises(ValueError, match="invalid asset_class"):
        await client.get_ticker("AAPL", asset_class="forex")
@@ -1,50 +0,0 @@
from __future__ import annotations
from unittest.mock import MagicMock
from fastapi.testclient import TestClient
from mcp_alpaca.server import create_app
from mcp_common.auth import Principal, TokenStore
from mcp_common.environment import EnvironmentInfo
def _make_app(env_info, creds):
    """Build the alpaca app with a mocked client and core/observer tokens."""
    c = MagicMock()
    c.paper = True
    # "ct" carries the core scope, "ot" only observer.
    store = TokenStore(tokens={
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    })
    return create_app(client=c, token_store=store, creds=creds, env_info=env_info)
def test_environment_info_paper_is_testnet():
    """Alpaca: 'paper' in the secret maps to environment='testnet'."""
    env = EnvironmentInfo(
        exchange="alpaca",
        environment="testnet",
        source="env",
        env_value="true",
        base_url="https://paper-api.alpaca.markets",
    )
    app = _make_app(env, creds={"max_leverage": 1})
    c = TestClient(app)
    # Observer token is enough: environment_info is a read endpoint.
    r = c.post("/tools/environment_info", headers={"Authorization": "Bearer ot"})
    assert r.status_code == 200
    body = r.json()
    assert body["exchange"] == "alpaca"
    assert body["environment"] == "testnet"
    assert body["source"] == "env"
    assert body["base_url"] == "https://paper-api.alpaca.markets"
    assert body["max_leverage"] == 1
def test_environment_info_requires_auth():
    """Missing Authorization header must yield 401, even for reads."""
    env = EnvironmentInfo(
        exchange="alpaca", environment="testnet", source="default",
        env_value=None, base_url="https://paper-api.alpaca.markets",
    )
    app = _make_app(env, creds={"max_leverage": 1})
    c = TestClient(app)
    r = c.post("/tools/environment_info")
    assert r.status_code == 401
@@ -1,46 +0,0 @@
from __future__ import annotations
import pytest
from fastapi import HTTPException
from mcp_alpaca.leverage_cap import enforce_leverage, get_max_leverage
def test_get_max_leverage_returns_creds_value():
    """The cap comes straight from the secret's max_leverage field."""
    creds = {"max_leverage": 4}
    assert get_max_leverage(creds) == 4
def test_get_max_leverage_default_when_missing():
    """Default 1 (cash) when the secret has no max_leverage."""
    assert get_max_leverage({}) == 1
def test_enforce_leverage_pass_at_cap_one():
    """Alpaca cash account: cap 1, leverage 1 OK."""
    creds = {"max_leverage": 1}
    enforce_leverage(1, creds=creds, exchange="alpaca")  # no raise
def test_enforce_leverage_reject_over_cap_one():
    """Requests above the cap are rejected with a structured 403 detail."""
    creds = {"max_leverage": 1}
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(2, creds=creds, exchange="alpaca")
    assert exc.value.status_code == 403
    assert exc.value.detail["error"] == "LEVERAGE_CAP_EXCEEDED"
    assert exc.value.detail["exchange"] == "alpaca"
    assert exc.value.detail["requested"] == 2
    assert exc.value.detail["max"] == 1
def test_enforce_leverage_reject_when_below_one():
    """Leverage below 1 is invalid and also rejected with 403."""
    creds = {"max_leverage": 1}
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(0, creds=creds, exchange="alpaca")
    assert exc.value.status_code == 403
def test_enforce_leverage_default_when_none():
    """When requested is None, the cap itself is applied as the default."""
    creds = {"max_leverage": 1}
    result = enforce_leverage(None, creds=creds, exchange="alpaca")
    assert result == 1
@@ -1,110 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from fastapi.testclient import TestClient
from mcp_alpaca.server import create_app
from mcp_common.auth import Principal, TokenStore
@pytest.fixture
def token_store():
    """Two tokens: one core (write-capable) and one observer (read-only)."""
    return TokenStore(
        tokens={
            "core-tok": Principal("core", {"core"}),
            "obs-tok": Principal("observer", {"observer"}),
        }
    )
@pytest.fixture
def mock_client():
    """MagicMock exchange client with every endpoint stubbed as AsyncMock."""
    c = MagicMock()
    c.get_account = AsyncMock(return_value={"equity": 100000})
    c.get_positions = AsyncMock(return_value=[])
    c.get_activities = AsyncMock(return_value=[])
    c.get_assets = AsyncMock(return_value=[])
    c.get_ticker = AsyncMock(return_value={"symbol": "AAPL"})
    c.get_bars = AsyncMock(return_value={"bars": []})
    c.get_snapshot = AsyncMock(return_value={})
    c.get_option_chain = AsyncMock(return_value={"contracts": []})
    c.get_open_orders = AsyncMock(return_value=[])
    c.get_clock = AsyncMock(return_value={"is_open": True})
    c.get_calendar = AsyncMock(return_value=[])
    c.place_order = AsyncMock(return_value={"id": "o1"})
    c.amend_order = AsyncMock(return_value={"id": "o1"})
    c.cancel_order = AsyncMock(return_value={"canceled": True})
    c.cancel_all_orders = AsyncMock(return_value=[])
    c.close_position = AsyncMock(return_value={"id": "close1"})
    c.close_all_positions = AsyncMock(return_value=[])
    return c
@pytest.fixture
def http(mock_client, token_store):
    """Synchronous TestClient over the app built with the mocks above."""
    app = create_app(client=mock_client, token_store=token_store, creds={"max_leverage": 1})
    return TestClient(app)
# Auth headers reused by every parametrized RBAC test below.
CORE = {"Authorization": "Bearer core-tok"}
OBS = {"Authorization": "Bearer obs-tok"}
# (path, minimal-valid JSON body) per read endpoint.
READ_ENDPOINTS = [
    ("/tools/get_account", {}),
    ("/tools/get_positions", {}),
    ("/tools/get_activities", {}),
    ("/tools/get_assets", {}),
    ("/tools/get_ticker", {"symbol": "AAPL"}),
    ("/tools/get_bars", {"symbol": "AAPL"}),
    ("/tools/get_snapshot", {"symbol": "AAPL"}),
    ("/tools/get_option_chain", {"underlying": "AAPL"}),
    ("/tools/get_open_orders", {}),
    ("/tools/get_clock", {}),
    ("/tools/get_calendar", {}),
]
# (path, minimal-valid JSON body) per write endpoint (core-only).
WRITE_ENDPOINTS = [
    ("/tools/place_order", {"symbol": "AAPL", "side": "buy", "qty": 1}),
    ("/tools/amend_order", {"order_id": "o1", "qty": 2}),
    ("/tools/cancel_order", {"order_id": "o1"}),
    ("/tools/cancel_all_orders", {}),
    ("/tools/close_position", {"symbol": "AAPL"}),
    ("/tools/close_all_positions", {}),
]
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_core_ok(http, path, payload):
    """Core scope can hit every read endpoint."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_observer_ok(http, path, payload):
    """Observer scope can also hit every read endpoint."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_no_auth_401(http, path, payload):
    """Reads without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_core_ok(http, path, payload):
    """Core scope can hit every write endpoint."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_observer_403(http, path, payload):
    """Observer scope is forbidden (403) on every write endpoint."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 403, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_no_auth_401(http, path, payload):
    """Writes without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
-28
View File
@@ -1,28 +0,0 @@
[project]
name = "mcp-bybit"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
"mcp-common",
"fastapi>=0.115",
"uvicorn[standard]>=0.30",
"httpx>=0.27",
"pydantic>=2.6",
"pybit>=5.8",
]
[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_bybit"]
[tool.uv.sources]
mcp-common = { workspace = true }
[project.scripts]
mcp-bybit = "mcp_bybit.__main__:main"
@@ -1,30 +0,0 @@
from __future__ import annotations
from mcp_common.app_factory import ExchangeAppSpec, run_exchange_main
from mcp_bybit.client import BybitClient
from mcp_bybit.server import create_app
# Declarative wiring for the shared exchange-service entry point:
# env/secret discovery, default endpoints, and client/app factories.
SPEC = ExchangeAppSpec(
    exchange="bybit",
    creds_env_var="BYBIT_CREDENTIALS_FILE",
    env_var="BYBIT_TESTNET",
    flag_key="testnet",
    default_base_url_live="https://api.bybit.com",
    default_base_url_testnet="https://api-testnet.bybit.com",
    default_port=9019,
    # Client follows the resolved environment (testnet vs live).
    build_client=lambda creds, env_info: BybitClient(
        api_key=creds["api_key"],
        api_secret=creds["api_secret"],
        testnet=(env_info.environment == "testnet"),
    ),
    build_app=create_app,
)
def main():
    """Console entry point: run the bybit service from SPEC."""
    run_exchange_main(SPEC)
if __name__ == "__main__":
    main()
-672
View File
@@ -1,672 +0,0 @@
from __future__ import annotations
import asyncio
from typing import Any
from mcp_common import indicators as ind
from mcp_common import microstructure as micro
from pybit.unified_trading import HTTP
def _f(v: Any) -> float | None:
try:
return float(v)
except (TypeError, ValueError):
return None
def _i(v: Any) -> int | None:
try:
return int(v)
except (TypeError, ValueError):
return None
class BybitClient:
    """Async wrapper around pybit's unified-trading HTTP session."""
    def __init__(
        self,
        api_key: str,
        api_secret: str,
        testnet: bool = True,
        http: Any | None = None,
    ) -> None:
        """Store credentials and adopt an injected session or build a real one.

        http: optional pre-built session (used by tests to avoid network).
        """
        self.api_key = api_key
        self.api_secret = api_secret
        self.testnet = testnet
        # Falls back to a real pybit HTTP session when none is injected.
        self._http = http or HTTP(
            api_key=api_key,
            api_secret=api_secret,
            testnet=testnet,
        )
async def _run(self, fn, /, **kwargs):
    # pybit calls are blocking; run them off the event loop.
    return await asyncio.to_thread(fn, **kwargs)
@staticmethod
def _parse_ticker(row: dict) -> dict:
    """Map one raw Bybit ticker row to a normalized snake_case dict.

    Numeric fields go through _f, so missing/non-numeric values become None.
    """
    return {
        "symbol": row.get("symbol"),
        "last_price": _f(row.get("lastPrice")),
        "mark_price": _f(row.get("markPrice")),
        "bid": _f(row.get("bid1Price")),
        "ask": _f(row.get("ask1Price")),
        "volume_24h": _f(row.get("volume24h")),
        "turnover_24h": _f(row.get("turnover24h")),
        "funding_rate": _f(row.get("fundingRate")),
        "open_interest": _f(row.get("openInterest")),
    }
async def get_ticker(self, symbol: str, category: str = "linear") -> dict:
    """Normalized ticker for one symbol; {'error': 'not_found'} when absent."""
    resp = await self._run(
        self._http.get_tickers, category=category, symbol=symbol
    )
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"symbol": symbol, "error": "not_found"}
    return self._parse_ticker(rows[0])
async def get_ticker_batch(
    self, symbols: list[str], category: str = "linear"
) -> dict[str, dict]:
    """Fetch tickers for several symbols, one sequential request each."""
    # Awaits run one at a time, preserving request order per symbol.
    return {
        sym: await self.get_ticker(sym, category=category)
        for sym in symbols
    }
async def get_orderbook(
    self, symbol: str, category: str = "linear", limit: int = 50
) -> dict:
    """Order book as numeric [price, qty] levels plus server timestamp."""
    resp = await self._run(
        self._http.get_orderbook, category=category, symbol=symbol, limit=limit
    )
    r = resp.get("result") or {}
    # Bybit encodes levels as string pairs under short keys (s/b/a/ts).
    return {
        "symbol": r.get("s"),
        "bids": [[float(p), float(q)] for p, q in (r.get("b") or [])],
        "asks": [[float(p), float(q)] for p, q in (r.get("a") or [])],
        "timestamp": r.get("ts"),
    }
async def get_historical(
    self,
    symbol: str,
    category: str = "linear",
    interval: str = "60",
    start: int | None = None,
    end: int | None = None,
    limit: int = 1000,
) -> dict:
    """OHLCV candles from the kline endpoint, returned oldest-first.

    start/end are optional epoch-millisecond bounds; interval uses Bybit's
    string codes.
    """
    kwargs = dict(
        category=category,
        symbol=symbol,
        interval=interval,
        limit=limit,
    )
    if start is not None:
        kwargs["start"] = start
    if end is not None:
        kwargs["end"] = end
    resp = await self._run(self._http.get_kline, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    # Sort chronologically regardless of the API's row ordering.
    rows_sorted = sorted(rows, key=lambda r: int(r[0]))
    candles = [
        {
            "timestamp": int(r[0]),
            "open": float(r[1]),
            "high": float(r[2]),
            "low": float(r[3]),
            "close": float(r[4]),
            "volume": float(r[5]),
        }
        for r in rows_sorted
    ]
    return {"symbol": symbol, "candles": candles}
async def get_indicators(
    self,
    symbol: str,
    category: str = "linear",
    indicators: list[str] | None = None,
    interval: str = "60",
    start: int | None = None,
    end: int | None = None,
) -> dict:
    """Compute the requested technical indicators from historical candles.

    Unknown indicator names map to None in the output rather than raising.
    """
    indicators = indicators or ["rsi", "atr", "macd", "adx"]
    historical = await self.get_historical(
        symbol, category=category, interval=interval, start=start, end=end
    )
    candles = historical.get("candles", [])
    closes = [c["close"] for c in candles]
    highs = [c["high"] for c in candles]
    lows = [c["low"] for c in candles]
    out: dict[str, Any] = {"symbol": symbol, "category": category}
    for name in indicators:
        n = name.lower()
        if n == "sma":
            # Fixed 20-period window for SMA.
            out["sma"] = ind.sma(closes, 20)
        elif n == "rsi":
            out["rsi"] = ind.rsi(closes)
        elif n == "atr":
            out["atr"] = ind.atr(highs, lows, closes)
        elif n == "macd":
            out["macd"] = ind.macd(closes)
        elif n == "adx":
            out["adx"] = ind.adx(highs, lows, closes)
        else:
            out[n] = None
    return out
async def get_funding_rate(self, symbol: str, category: str = "linear") -> dict:
    """Current funding rate and next funding time from the ticker endpoint."""
    resp = await self._run(
        self._http.get_tickers, category=category, symbol=symbol
    )
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"symbol": symbol, "error": "not_found"}
    row = rows[0]
    return {
        "symbol": row.get("symbol"),
        "funding_rate": _f(row.get("fundingRate")),
        "next_funding_time": _i(row.get("nextFundingTime")),
    }
async def get_funding_history(
    self, symbol: str, category: str = "linear", limit: int = 100
) -> dict:
    """Historical funding rates as {timestamp, rate} points."""
    resp = await self._run(
        self._http.get_funding_rate_history,
        category=category, symbol=symbol, limit=limit,
    )
    rows = (resp.get("result") or {}).get("list") or []
    hist = [
        {
            "timestamp": int(r.get("fundingRateTimestamp", 0)),
            "rate": float(r.get("fundingRate", 0)),
        }
        for r in rows
    ]
    return {"symbol": symbol, "history": hist}
async def get_open_interest(
    self,
    symbol: str,
    category: str = "linear",
    interval: str = "5min",
    limit: int = 288,
) -> dict:
    """Open-interest time series plus the most recent value.

    Default limit 288 covers 24h at 5-minute granularity.
    """
    resp = await self._run(
        self._http.get_open_interest,
        category=category, symbol=symbol, intervalTime=interval, limit=limit,
    )
    rows = (resp.get("result") or {}).get("list") or []
    points = [
        {
            "timestamp": int(r.get("timestamp", 0)),
            "oi": float(r.get("openInterest", 0)),
        }
        for r in rows
    ]
    # NOTE(review): assumes the API returns newest-first, so points[0] is
    # the current value — confirm against the Bybit v5 docs.
    current_oi = points[0]["oi"] if points else None
    return {
        "symbol": symbol,
        "category": category,
        "interval": interval,
        "current_oi": current_oi,
        "points": points,
    }
async def get_instruments(self, category: str = "linear", symbol: str | None = None) -> dict:
    """List instruments for a category, with tick/lot size filters flattened."""
    kwargs: dict[str, Any] = {"category": category}
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.get_instruments_info, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    instruments = []
    for r in rows:
        pf = r.get("priceFilter") or {}
        lf = r.get("lotSizeFilter") or {}
        instruments.append({
            "symbol": r.get("symbol"),
            "status": r.get("status"),
            "base_coin": r.get("baseCoin"),
            "quote_coin": r.get("quoteCoin"),
            "tick_size": _f(pf.get("tickSize")),
            "qty_step": _f(lf.get("qtyStep")),
            "min_qty": _f(lf.get("minOrderQty")),
        })
    return {"category": category, "instruments": instruments}
async def get_option_chain(self, base_coin: str, expiry: str | None = None) -> dict:
    """Option contracts for a base coin, optionally filtered by expiry.

    The expiry filter is a substring match against the contract symbol.
    """
    kwargs: dict[str, Any] = {"category": "option", "baseCoin": base_coin.upper()}
    resp = await self._run(self._http.get_instruments_info, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    options = []
    for r in rows:
        delivery = r.get("deliveryTime")
        if expiry and expiry not in r.get("symbol", ""):
            continue
        options.append({
            "symbol": r.get("symbol"),
            "base_coin": r.get("baseCoin"),
            "settle_coin": r.get("settleCoin"),
            "type": r.get("optionsType"),
            "launch_time": int(r.get("launchTime", 0)),
            "delivery_time": int(delivery) if delivery else None,
        })
    return {"base_coin": base_coin.upper(), "options": options}
async def get_positions(
    self, category: str = "linear", settle_coin: str = "USDT"
) -> list[dict]:
    """Open positions, normalized to snake_case numeric fields.

    settleCoin is only valid (and required here) for linear/inverse.
    """
    kwargs: dict[str, Any] = {"category": category}
    if category in ("linear", "inverse"):
        kwargs["settleCoin"] = settle_coin
    resp = await self._run(self._http.get_positions, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    out = []
    for r in rows:
        out.append({
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "size": _f(r.get("size")),
            "entry_price": _f(r.get("avgPrice")),
            "unrealized_pnl": _f(r.get("unrealisedPnl")),
            "leverage": _f(r.get("leverage")),
            "liquidation_price": _f(r.get("liqPrice")),
            "position_value": _f(r.get("positionValue")),
        })
    return out
async def get_account_summary(self, account_type: str = "UNIFIED") -> dict:
    """Wallet totals plus per-coin balances for the given account type."""
    resp = await self._run(
        self._http.get_wallet_balance, accountType=account_type
    )
    rows = (resp.get("result") or {}).get("list") or []
    if not rows:
        return {"error": "no_account"}
    # Only the first account entry is summarized.
    a = rows[0]
    coins = []
    for c in a.get("coin") or []:
        coins.append({
            "coin": c.get("coin"),
            "wallet_balance": _f(c.get("walletBalance")),
            "equity": _f(c.get("equity")),
        })
    return {
        "account_type": a.get("accountType"),
        "equity": _f(a.get("totalEquity")),
        "wallet_balance": _f(a.get("totalWalletBalance")),
        "margin_balance": _f(a.get("totalMarginBalance")),
        "available_balance": _f(a.get("totalAvailableBalance")),
        "unrealized_pnl": _f(a.get("totalPerpUPL")),
        "coins": coins,
    }
async def get_trade_history(
    self, category: str = "linear", limit: int = 50
) -> list[dict]:
    """Recent executions (fills) normalized to snake_case."""
    resp = await self._run(
        self._http.get_executions, category=category, limit=limit
    )
    rows = (resp.get("result") or {}).get("list") or []
    return [
        {
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "size": _f(r.get("execQty")),
            "price": _f(r.get("execPrice")),
            "fee": _f(r.get("execFee")),
            "timestamp": _i(r.get("execTime")),
            "order_id": r.get("orderId"),
        }
        for r in rows
    ]
async def get_open_orders(
    self,
    category: str = "linear",
    symbol: str | None = None,
    settle_coin: str = "USDT",
) -> list[dict]:
    """Pending orders, optionally filtered by symbol.

    For linear/inverse without a symbol, settleCoin scopes the query.
    """
    kwargs: dict[str, Any] = {"category": category}
    if category in ("linear", "inverse") and not symbol:
        kwargs["settleCoin"] = settle_coin
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.get_open_orders, **kwargs)
    rows = (resp.get("result") or {}).get("list") or []
    return [
        {
            "order_id": r.get("orderId"),
            "symbol": r.get("symbol"),
            "side": r.get("side"),
            "qty": _f(r.get("qty")),
            "price": _f(r.get("price")),
            "type": r.get("orderType"),
            "status": r.get("orderStatus"),
            "reduce_only": bool(r.get("reduceOnly")),
        }
        for r in rows
    ]
async def get_orderbook_imbalance(
    self,
    symbol: str,
    category: str = "linear",
    depth: int = 10,
) -> dict:
    """Microstructure: bid/ask imbalance ratio + microprice + slope."""
    # Fetch at least 50 levels so the imbalance window is always filled.
    ob = await self.get_orderbook(symbol=symbol, category=category, limit=max(depth, 50))
    result = micro.orderbook_imbalance(ob.get("bids") or [], ob.get("asks") or [], depth=depth)
    return {
        "symbol": symbol,
        "category": category,
        "depth": depth,
        **result,
        "timestamp": ob.get("timestamp"),
    }
async def get_basis_term_structure(self, asset: str) -> dict:
    """Basis curve futures (dated) vs perp + spot. Filters dated futures
    for the asset and computes annualized basis for each contract.
    """
    import datetime as _dt
    asset = asset.upper()
    spot = await self.get_ticker(f"{asset}USDT", category="spot")
    perp = await self.get_ticker(f"{asset}USDT", category="linear")
    sp = spot.get("last_price")
    pp = perp.get("last_price")
    # List dated futures among the linear instruments.
    instr = await self.get_instruments(category="linear")
    items = (instr.get("instruments") or [])
    # Dated contracts carry a "-<expiry>" suffix after the asset prefix.
    futures = [
        x for x in items
        if x.get("symbol", "").startswith(f"{asset}-") or x.get("symbol", "").startswith(f"{asset}USDT-")
    ]
    rows: list[dict[str, Any]] = []
    if sp:
        now_ms = int(_dt.datetime.now(_dt.UTC).timestamp() * 1000)
        # Cap at 10 contracts: each needs its own ticker request.
        for f in futures[:10]:
            tk = await self.get_ticker(f["symbol"], category="linear")
            fp = tk.get("last_price")
            expiry_ms = f.get("delivery_time")
            if not fp or not expiry_ms:
                continue
            # Floor at 1 day to avoid exploding annualization near expiry.
            days = max((int(expiry_ms) - now_ms) / 86_400_000, 1)
            basis_pct = 100.0 * (fp - sp) / sp
            annualized = basis_pct * 365.0 / days
            rows.append({
                "symbol": f["symbol"],
                "expiry_ms": int(expiry_ms),
                "days_to_expiry": round(days, 2),
                "future_price": fp,
                "basis_pct": round(basis_pct, 4),
                "annualized_basis_pct": round(annualized, 4),
            })
    rows.sort(key=lambda r: r["days_to_expiry"])
    return {
        "asset": asset,
        "spot_price": sp,
        "perp_price": pp,
        "perp_basis_pct": round(100.0 * (pp - sp) / sp, 4) if (sp and pp) else None,
        "term_structure": rows,
        "data_timestamp": _dt.datetime.now(_dt.UTC).isoformat(),
    }
async def get_basis_spot_perp(self, asset: str) -> dict:
    """Spot-vs-perp basis for one asset (absolute and percent of spot).

    Basis fields stay None when either price is missing/zero.
    """
    asset = asset.upper()
    symbol = f"{asset}USDT"
    spot = await self.get_ticker(symbol, category="spot")
    perp = await self.get_ticker(symbol, category="linear")
    sp = spot.get("last_price")
    pp = perp.get("last_price")
    basis_abs = basis_pct = None
    if sp and pp:
        basis_abs = pp - sp
        basis_pct = 100.0 * basis_abs / sp
    return {
        "asset": asset,
        "symbol": symbol,
        "spot_price": sp,
        "perp_price": pp,
        "basis_abs": basis_abs,
        "basis_pct": basis_pct,
        "funding_rate": perp.get("funding_rate"),
    }
def _envelope(self, resp: dict, payload: dict) -> dict:
code = resp.get("retCode", 0)
if code != 0:
return {"error": resp.get("retMsg", "bybit_error"), "code": code}
return payload
async def place_order(
    self,
    category: str,
    symbol: str,
    side: str,
    qty: float,
    order_type: str = "Limit",
    price: float | None = None,
    tif: str = "GTC",
    reduce_only: bool = False,
    position_idx: int | None = None,
) -> dict:
    """Submit one order; returns {order_id, order_link_id, status} or an
    error envelope when Bybit reports a non-zero retCode.
    """
    # Bybit expects qty/price as strings.
    kwargs: dict[str, Any] = {
        "category": category,
        "symbol": symbol,
        "side": side,
        "qty": str(qty),
        "orderType": order_type,
        "timeInForce": tif,
        "reduceOnly": reduce_only,
    }
    if price is not None:
        kwargs["price"] = str(price)
    if position_idx is not None:
        kwargs["positionIdx"] = position_idx
    if category == "option":
        import uuid
        # Options get a client order id for traceability.
        kwargs["orderLinkId"] = f"cerbero-{uuid.uuid4().hex[:16]}"
    resp = await self._run(self._http.place_order, **kwargs)
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "order_id": r.get("orderId"),
        "order_link_id": r.get("orderLinkId"),
        "status": "submitted",
    })
async def place_combo_order(
    self,
    category: str,
    legs: list[dict[str, Any]],
) -> dict:
    """Atomic multi-leg via /v5/order/create-batch (Bybit option only).
    Bybit supports batch orders only on category='option'. For perp/linear
    use a loop of place_order calls (not atomic).
    legs: [{symbol, side, qty, order_type, price?, tif?, reduce_only?}].
    """
    if category != "option":
        raise ValueError("place_combo_order: Bybit batch_order è disponibile solo su category='option'")
    if len(legs) < 2:
        raise ValueError("combo requires at least 2 legs")
    import uuid
    request: list[dict[str, Any]] = []
    for leg in legs:
        # Each leg gets its own client order id.
        entry: dict[str, Any] = {
            "symbol": leg["symbol"],
            "side": leg["side"],
            "qty": str(leg["qty"]),
            "orderType": leg.get("order_type", "Limit"),
            "timeInForce": leg.get("tif", "GTC"),
            "reduceOnly": leg.get("reduce_only", False),
            "orderLinkId": f"cerbero-{uuid.uuid4().hex[:16]}",
        }
        if leg.get("price") is not None:
            entry["price"] = str(leg["price"])
        request.append(entry)
    resp = await self._run(self._http.place_batch_order, category=category, request=request)
    result_list = (resp.get("result") or {}).get("list") or []
    orders = [
        {
            "order_id": r.get("orderId"),
            "order_link_id": r.get("orderLinkId"),
            "status": "submitted",
        }
        for r in result_list
    ]
    return self._envelope(resp, {"orders": orders})
async def amend_order(
    self,
    category: str,
    symbol: str,
    order_id: str,
    new_qty: float | None = None,
    new_price: float | None = None,
) -> dict:
    """Amend qty and/or price of an open order; only set fields are sent."""
    kwargs: dict[str, Any] = {
        "category": category,
        "symbol": symbol,
        "orderId": order_id,
    }
    if new_qty is not None:
        kwargs["qty"] = str(new_qty)
    if new_price is not None:
        kwargs["price"] = str(new_price)
    resp = await self._run(self._http.amend_order, **kwargs)
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "order_id": r.get("orderId", order_id),
        "status": "amended",
    })
async def cancel_order(
    self, category: str, symbol: str, order_id: str
) -> dict:
    """Cancel one order by id; returns {order_id, status} or error envelope."""
    resp = await self._run(
        self._http.cancel_order,
        category=category, symbol=symbol, orderId=order_id,
    )
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "order_id": r.get("orderId", order_id),
        "status": "cancelled",
    })
async def cancel_all_orders(
    self, category: str, symbol: str | None = None
) -> dict:
    """Cancel all open orders in a category (optionally one symbol)."""
    kwargs: dict[str, Any] = {"category": category}
    if symbol:
        kwargs["symbol"] = symbol
    resp = await self._run(self._http.cancel_all_orders, **kwargs)
    r = resp.get("result") or {}
    ids = [x.get("orderId") for x in (r.get("list") or [])]
    return self._envelope(resp, {
        "cancelled_ids": ids,
        "count": len(ids),
    })
async def set_stop_loss(
    self, category: str, symbol: str, stop_loss: float,
    position_idx: int = 0,
) -> dict:
    """Attach/replace a stop-loss on an existing position."""
    resp = await self._run(
        self._http.set_trading_stop,
        category=category, symbol=symbol,
        stopLoss=str(stop_loss), positionIdx=position_idx,
    )
    return self._envelope(resp, {
        "symbol": symbol, "stop_loss": stop_loss,
        "status": "stop_loss_set",
    })
async def set_take_profit(
    self, category: str, symbol: str, take_profit: float,
    position_idx: int = 0,
) -> dict:
    """Attach/replace a take-profit on an existing position."""
    resp = await self._run(
        self._http.set_trading_stop,
        category=category, symbol=symbol,
        takeProfit=str(take_profit), positionIdx=position_idx,
    )
    return self._envelope(resp, {
        "symbol": symbol, "take_profit": take_profit,
        "status": "take_profit_set",
    })
async def close_position(self, category: str, symbol: str) -> dict:
    """Flatten the open position on symbol with a reduce-only market order.

    Returns {'error': 'no_open_position', ...} when nothing is open.
    """
    positions = await self.get_positions(category=category)
    target = next((p for p in positions if p["symbol"] == symbol and (p["size"] or 0) > 0), None)
    if not target:
        return {"error": "no_open_position", "symbol": symbol}
    # Close by sending the opposite side for the full size.
    close_side = "Sell" if target["side"] == "Buy" else "Buy"
    return await self.place_order(
        category=category,
        symbol=symbol,
        side=close_side,
        qty=target["size"],
        order_type="Market",
        reduce_only=True,
        tif="IOC",
    )
async def set_leverage(
    self, category: str, symbol: str, leverage: int
) -> dict:
    """Set the same leverage for both buy and sell sides of a symbol."""
    resp = await self._run(
        self._http.set_leverage,
        category=category, symbol=symbol,
        buyLeverage=str(leverage), sellLeverage=str(leverage),
    )
    return self._envelope(resp, {
        "symbol": symbol, "leverage": leverage,
        "status": "leverage_set",
    })
async def switch_position_mode(
    self, category: str, symbol: str, mode: str
) -> dict:
    """Switch between one-way and hedge position modes for a symbol."""
    # Bybit mode codes: 0 = one-way, 3 = hedge; anything not "hedge" is one-way.
    mode_code = 3 if mode.lower() == "hedge" else 0
    resp = await self._run(
        self._http.switch_position_mode,
        category=category, symbol=symbol, mode=mode_code,
    )
    return self._envelope(resp, {
        "symbol": symbol, "mode": mode,
        "status": "mode_switched",
    })
async def transfer_asset(
    self,
    coin: str,
    amount: float,
    from_type: str,
    to_type: str,
) -> dict:
    """Internal transfer between account types (e.g. FUND -> UNIFIED).

    A fresh UUID transferId makes the request idempotent on Bybit's side.
    """
    import uuid
    resp = await self._run(
        self._http.create_internal_transfer,
        transferId=str(uuid.uuid4()),
        coin=coin,
        amount=str(amount),
        fromAccountType=from_type,
        toAccountType=to_type,
    )
    r = resp.get("result") or {}
    return self._envelope(resp, {
        "transfer_id": r.get("transferId"),
        "coin": coin,
        "amount": amount,
        "status": "submitted",
    })
@@ -1,56 +0,0 @@
"""Leverage cap server-side per place_order.
Cap letto dal secret JSON via campo `max_leverage`. Default 1 (cash) se assente.
"""
from __future__ import annotations
from fastapi import HTTPException
def get_max_leverage(creds: dict) -> int:
    """Read max_leverage from the secret; default 1 when missing or invalid."""
    try:
        cap = int(creds.get("max_leverage", 1))
    except (TypeError, ValueError):
        cap = 1
    # Never report a cap below 1 (cash account floor).
    return cap if cap > 1 else 1
def enforce_leverage(
    requested: int | float | None,
    *,
    creds: dict,
    exchange: str,
) -> int:
    """Validate and apply the server-side leverage cap.

    Returns the leverage to use. A ``None`` request defaults to the cap
    itself. Raises ``HTTPException(403, LEVERAGE_CAP_EXCEEDED)`` when the
    request is below 1 or exceeds the cap.
    """
    # Resolve the cap from the secret (inlined get_max_leverage logic):
    # default 1 when missing/unparsable, floor at 1.
    try:
        cap = int(creds.get("max_leverage", 1))
    except (TypeError, ValueError):
        cap = 1
    cap = max(1, cap)
    if requested is None:
        return cap
    lev = int(requested)  # fractional requests are truncated toward zero
    detail = {
        "error": "LEVERAGE_CAP_EXCEEDED",
        "exchange": exchange,
        "requested": lev,
        "max": cap,
    }
    if lev < 1:
        detail["reason"] = "leverage must be >= 1"
        raise HTTPException(status_code=403, detail=detail)
    if lev > cap:
        raise HTTPException(status_code=403, detail=detail)
    return lev
-522
View File
@@ -1,522 +0,0 @@
from __future__ import annotations
import os
from fastapi import Depends, HTTPException
from mcp_common.audit import audit_write_op
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.environment import EnvironmentInfo
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel, Field
from mcp_bybit.client import BybitClient
from mcp_bybit.leverage_cap import enforce_leverage as _enforce_leverage
from mcp_bybit.leverage_cap import get_max_leverage
# --- Body models: reads ---
# Request bodies for the read-only /tools/* endpoints. All default to the
# "linear" (USDT perpetual) category unless stated otherwise.
class TickerReq(BaseModel):
    # Single-symbol ticker request, e.g. symbol="BTCUSDT".
    symbol: str
    category: str = "linear"
class TickerBatchReq(BaseModel):
    # Ticker request for multiple symbols at once.
    symbols: list[str]
    category: str = "linear"
class OrderbookReq(BaseModel):
    # Orderbook snapshot; `limit` = depth (levels per side).
    symbol: str
    category: str = "linear"
    limit: int = 50
class HistoricalReq(BaseModel):
    # OHLCV candles; `interval` follows Bybit kline codes ("60" = 1h),
    # `start`/`end` are epoch milliseconds.
    symbol: str
    category: str = "linear"
    interval: str = "60"
    start: int | None = None
    end: int | None = None
    limit: int = 1000
class IndicatorsReq(BaseModel):
    # Technical indicators computed over the candle window.
    symbol: str
    category: str = "linear"
    indicators: list[str] = ["rsi", "atr", "macd", "adx"]
    interval: str = "60"
    start: int | None = None
    end: int | None = None
class FundingRateReq(BaseModel):
    # Current perp funding rate.
    symbol: str
    category: str = "linear"
class FundingHistoryReq(BaseModel):
    # Historical funding rates, newest first.
    symbol: str
    category: str = "linear"
    limit: int = 100
class OpenInterestReq(BaseModel):
    # OI history; 288 x 5min = 24h of points by default.
    symbol: str
    category: str = "linear"
    interval: str = "5min"
    limit: int = 288
class InstrumentsReq(BaseModel):
    # Contract specs; symbol=None returns the whole category.
    category: str = "linear"
    symbol: str | None = None
class OptionChainReq(BaseModel):
    # Option chain for a base coin; expiry=None returns all expiries.
    base_coin: str
    expiry: str | None = None
class PositionsReq(BaseModel):
    # Open positions in a category.
    category: str = "linear"
class AccountSummaryReq(BaseModel):
    # No parameters; kept as a model so the endpoint keeps a JSON body contract.
    pass
class TradeHistoryReq(BaseModel):
    # Recent fills.
    category: str = "linear"
    limit: int = 50
class OpenOrdersReq(BaseModel):
    # Pending orders; symbol=None returns all for the settle coin.
    category: str = "linear"
    symbol: str | None = None
class BasisSpotPerpReq(BaseModel):
    # Spot-vs-perp basis for an asset, e.g. asset="BTC".
    asset: str
class OrderbookImbalanceReq(BaseModel):
    # Microstructure metrics over the top-`depth` book levels.
    symbol: str
    category: str = "linear"
    depth: int = 10
class BasisTermStructureReq(BaseModel):
    # Dated-futures basis curve vs spot for an asset.
    asset: str
# --- Body models: writes ---
# Request bodies for the write /tools/* endpoints (CORE capability only).
class PlaceOrderReq(BaseModel):
    # Single order; `price` required only for Limit orders,
    # `position_idx` only relevant in hedge mode.
    category: str
    symbol: str
    side: str
    qty: float
    order_type: str = "Limit"
    price: float | None = None
    tif: str = "GTC"
    reduce_only: bool = False
    position_idx: int | None = None
class ComboLegReq(BaseModel):
    # One leg of a multi-leg (combo) order.
    symbol: str
    side: str
    qty: float
    order_type: str = "Limit"
    price: float | None = None
    tif: str = "GTC"
    reduce_only: bool = False
class PlaceComboOrderReq(BaseModel):
    # Atomic multi-leg order; Bybit batch orders exist only for options,
    # and a combo needs at least two legs.
    category: str = "option"
    legs: list[ComboLegReq] = Field(..., min_length=2)
class AmendOrderReq(BaseModel):
    # Amend qty and/or price of an existing order.
    category: str
    symbol: str
    order_id: str
    new_qty: float | None = None
    new_price: float | None = None
class CancelOrderReq(BaseModel):
    # Cancel one order by id.
    category: str
    symbol: str
    order_id: str
class CancelAllReq(BaseModel):
    # Cancel all orders; symbol=None cancels category-wide.
    category: str
    symbol: str | None = None
class SetStopLossReq(BaseModel):
    # Attach/replace a stop loss on a position (position_idx 0 = one-way mode).
    category: str
    symbol: str
    stop_loss: float
    position_idx: int = 0
class SetTakeProfitReq(BaseModel):
    # Attach/replace a take profit on a position.
    category: str
    symbol: str
    take_profit: float
    position_idx: int = 0
class ClosePositionReq(BaseModel):
    # Close the open position on a symbol.
    category: str
    symbol: str
class SetLeverageReq(BaseModel):
    # Uniform buy/sell leverage; validated against the secret's max_leverage cap.
    category: str
    symbol: str
    leverage: int
class SwitchModeReq(BaseModel):
    # mode is "hedge" or anything else for one-way.
    category: str
    symbol: str
    mode: str
class TransferReq(BaseModel):
    # Internal transfer between account types (e.g. UNIFIED -> FUND).
    coin: str
    amount: float
    from_type: str
    to_type: str
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
allowed: set[str] = set()
if core:
allowed.add("core")
if observer:
allowed.add("observer")
if not (principal.capabilities & allowed):
raise HTTPException(status_code=403, detail="forbidden")
def create_app(
    *,
    client: BybitClient,
    token_store: TokenStore,
    creds: dict | None = None,
    env_info: EnvironmentInfo | None = None,
):
    """Build the mcp-bybit FastAPI application.

    Every Bybit tool is exposed as a POST endpoint under ``/tools/*``,
    authenticated via ``require_principal`` and gated by a capability ACL:
    reads allow ``core`` and ``observer``, writes allow ``core`` only.
    Every write is recorded through ``audit_write_op``.

    Args:
        client: Bybit client performing the actual exchange calls.
        token_store: Backing store for bearer-token authentication.
        creds: Secret payload; its ``max_leverage`` field caps set_leverage.
        env_info: Optional environment descriptor for /tools/environment_info.

    Returns:
        The configured FastAPI app with the MCP bridge endpoint mounted.
    """
    creds = creds or {}
    app = build_app(name="mcp-bybit", version="0.1.0", token_store=token_store)
    # ── Reads ──────────────────────────────────────────────
    @app.post("/tools/environment_info", tags=["reads"])
    async def t_environment_info(principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        # Without an injected EnvironmentInfo, derive the environment from the client.
        if env_info is None:
            return {
                "exchange": "bybit",
                "environment": "testnet" if client.testnet else "mainnet",
                "source": "credentials",
                "env_value": None,
                "base_url": getattr(client, "base_url", None),
                "max_leverage": get_max_leverage(creds),
            }
        return {
            "exchange": env_info.exchange,
            "environment": env_info.environment,
            "source": env_info.source,
            "env_value": env_info.env_value,
            "base_url": env_info.base_url,
            "max_leverage": get_max_leverage(creds),
        }
    @app.post("/tools/get_ticker", tags=["reads"])
    async def t_get_ticker(body: TickerReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_ticker(body.symbol, body.category)
    @app.post("/tools/get_ticker_batch", tags=["reads"])
    async def t_get_ticker_batch(body: TickerBatchReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_ticker_batch(body.symbols, body.category)
    @app.post("/tools/get_orderbook", tags=["reads"])
    async def t_get_orderbook(body: OrderbookReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_orderbook(body.symbol, body.category, body.limit)
    @app.post("/tools/get_historical", tags=["reads"])
    async def t_get_historical(body: HistoricalReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_historical(
            body.symbol, body.category, body.interval, body.start, body.end, body.limit,
        )
    @app.post("/tools/get_indicators", tags=["reads"])
    async def t_get_indicators(body: IndicatorsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_indicators(
            body.symbol, body.category, body.indicators,
            body.interval, body.start, body.end,
        )
    @app.post("/tools/get_funding_rate", tags=["reads"])
    async def t_get_funding_rate(body: FundingRateReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_funding_rate(body.symbol, body.category)
    @app.post("/tools/get_funding_history", tags=["reads"])
    async def t_get_funding_history(body: FundingHistoryReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_funding_history(body.symbol, body.category, body.limit)
    @app.post("/tools/get_open_interest", tags=["reads"])
    async def t_get_open_interest(body: OpenInterestReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_open_interest(body.symbol, body.category, body.interval, body.limit)
    @app.post("/tools/get_instruments", tags=["reads"])
    async def t_get_instruments(body: InstrumentsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_instruments(body.category, body.symbol)
    @app.post("/tools/get_option_chain", tags=["reads"])
    async def t_get_option_chain(body: OptionChainReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_option_chain(body.base_coin, body.expiry)
    @app.post("/tools/get_positions", tags=["reads"])
    async def t_get_positions(body: PositionsReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"positions": await client.get_positions(body.category)}
    @app.post("/tools/get_account_summary", tags=["reads"])
    async def t_get_account_summary(body: AccountSummaryReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_account_summary()
    @app.post("/tools/get_trade_history", tags=["reads"])
    async def t_get_trade_history(body: TradeHistoryReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"trades": await client.get_trade_history(body.category, body.limit)}
    @app.post("/tools/get_open_orders", tags=["reads"])
    async def t_get_open_orders(body: OpenOrdersReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return {"orders": await client.get_open_orders(body.category, body.symbol)}
    @app.post("/tools/get_basis_spot_perp", tags=["reads"])
    async def t_get_basis_spot_perp(body: BasisSpotPerpReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_basis_spot_perp(body.asset)
    @app.post("/tools/get_orderbook_imbalance", tags=["reads"])
    async def t_get_ob_imbalance(body: OrderbookImbalanceReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_orderbook_imbalance(body.symbol, body.category, body.depth)
    @app.post("/tools/get_basis_term_structure", tags=["reads"])
    async def t_get_basis_term_structure(body: BasisTermStructureReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        return await client.get_basis_term_structure(body.asset)
    # ── Writes ─────────────────────────────────────────────
    @app.post("/tools/place_order", tags=["writes"])
    async def t_place_order(body: PlaceOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.place_order(
            body.category, body.symbol, body.side, body.qty,
            body.order_type, body.price, body.tif, body.reduce_only, body.position_idx,
        )
        audit_write_op(
            principal=principal, action="place_order", exchange="bybit",
            target=body.symbol,
            payload={"category": body.category, "side": body.side, "qty": body.qty,
                     "order_type": body.order_type, "price": body.price, "tif": body.tif,
                     "reduce_only": body.reduce_only},
            result=result,
        )
        return result
    @app.post("/tools/place_combo_order", tags=["writes"])
    async def t_place_combo_order(body: PlaceComboOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.place_combo_order(
            category=body.category,
            legs=[leg.model_dump() for leg in body.legs],
        )
        audit_write_op(
            principal=principal, action="place_combo_order", exchange="bybit",
            payload={"category": body.category,
                     "legs": [leg.model_dump() for leg in body.legs]},
            result=result if isinstance(result, dict) else None,
        )
        return result
    @app.post("/tools/amend_order", tags=["writes"])
    async def t_amend_order(body: AmendOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.amend_order(
            body.category, body.symbol, body.order_id, body.new_qty, body.new_price,
        )
        audit_write_op(
            principal=principal, action="amend_order", exchange="bybit",
            target=body.order_id,
            payload={"category": body.category, "symbol": body.symbol,
                     "new_qty": body.new_qty, "new_price": body.new_price},
            result=result,
        )
        return result
    @app.post("/tools/cancel_order", tags=["writes"])
    async def t_cancel_order(body: CancelOrderReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.cancel_order(body.category, body.symbol, body.order_id)
        audit_write_op(
            principal=principal, action="cancel_order", exchange="bybit",
            target=body.order_id,
            payload={"category": body.category, "symbol": body.symbol},
            result=result,
        )
        return result
    @app.post("/tools/cancel_all_orders", tags=["writes"])
    async def t_cancel_all(body: CancelAllReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.cancel_all_orders(body.category, body.symbol)
        audit_write_op(
            principal=principal, action="cancel_all_orders", exchange="bybit",
            target=body.symbol,
            payload={"category": body.category},
            result=result,
        )
        return result
    @app.post("/tools/set_stop_loss", tags=["writes"])
    async def t_set_sl(body: SetStopLossReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.set_stop_loss(body.category, body.symbol, body.stop_loss, body.position_idx)
        audit_write_op(
            principal=principal, action="set_stop_loss", exchange="bybit",
            target=body.symbol,
            payload={"stop_loss": body.stop_loss, "position_idx": body.position_idx},
            result=result,
        )
        return result
    @app.post("/tools/set_take_profit", tags=["writes"])
    async def t_set_tp(body: SetTakeProfitReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.set_take_profit(body.category, body.symbol, body.take_profit, body.position_idx)
        audit_write_op(
            principal=principal, action="set_take_profit", exchange="bybit",
            target=body.symbol,
            payload={"take_profit": body.take_profit, "position_idx": body.position_idx},
            result=result,
        )
        return result
    @app.post("/tools/close_position", tags=["writes"])
    async def t_close(body: ClosePositionReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.close_position(body.category, body.symbol)
        audit_write_op(
            principal=principal, action="close_position", exchange="bybit",
            target=body.symbol,
            payload={"category": body.category},
            result=result,
        )
        return result
    @app.post("/tools/set_leverage", tags=["writes"])
    async def t_set_leverage(body: SetLeverageReq, principal: Principal = Depends(require_principal)):
        # FIX: authorize BEFORE validating leverage. Previously the cap check
        # ran first, so unauthorized principals received LEVERAGE_CAP_EXCEEDED
        # (leaking the configured cap) instead of the uniform 403 "forbidden".
        _check(principal, core=True)
        _enforce_leverage(body.leverage, creds=creds, exchange="bybit")
        result = await client.set_leverage(body.category, body.symbol, body.leverage)
        audit_write_op(
            principal=principal, action="set_leverage", exchange="bybit",
            target=body.symbol,
            payload={"category": body.category, "leverage": body.leverage},
            result=result,
        )
        return result
    @app.post("/tools/switch_position_mode", tags=["writes"])
    async def t_switch_mode(body: SwitchModeReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.switch_position_mode(body.category, body.symbol, body.mode)
        audit_write_op(
            principal=principal, action="switch_position_mode", exchange="bybit",
            target=body.symbol,
            payload={"category": body.category, "mode": body.mode},
            result=result,
        )
        return result
    @app.post("/tools/transfer_asset", tags=["writes"])
    async def t_transfer(body: TransferReq, principal: Principal = Depends(require_principal)):
        _check(principal, core=True)
        result = await client.transfer_asset(body.coin, body.amount, body.from_type, body.to_type)
        audit_write_op(
            principal=principal, action="transfer_asset", exchange="bybit",
            payload={"coin": body.coin, "amount": body.amount,
                     "from_type": body.from_type, "to_type": body.to_type},
            result=result,
        )
        return result
    # ── MCP mount ──────────────────────────────────────────
    # The MCP bridge proxies tool calls back into this app over loopback.
    port = int(os.environ.get("PORT", "9019"))
    mount_mcp_endpoint(
        app,
        name="cerbero-bybit",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "environment_info", "description": "Ambiente operativo (testnet/mainnet), source, base_url, max_leverage cap."},
            {"name": "get_ticker", "description": "Ticker Bybit (spot/linear/inverse/option)."},
            {"name": "get_ticker_batch", "description": "Ticker per più simboli."},
            {"name": "get_orderbook", "description": "Orderbook profondità N."},
            {"name": "get_historical", "description": "OHLCV candles Bybit."},
            {"name": "get_indicators", "description": "Indicatori tecnici (RSI, ATR, MACD, ADX)."},
            {"name": "get_funding_rate", "description": "Funding corrente perp."},
            {"name": "get_funding_history", "description": "Funding storico perp."},
            {"name": "get_open_interest", "description": "Open interest history perp."},
            {"name": "get_instruments", "description": "Specs contratti."},
            {"name": "get_option_chain", "description": "Option chain BTC/ETH/SOL."},
            {"name": "get_positions", "description": "Posizioni aperte."},
            {"name": "get_account_summary", "description": "Wallet balance e margine."},
            {"name": "get_trade_history", "description": "Fills recenti."},
            {"name": "get_open_orders", "description": "Ordini pending."},
            {"name": "get_basis_spot_perp", "description": "Basis spot vs linear perp."},
            {"name": "get_orderbook_imbalance", "description": "Microstructure: imbalance ratio + microprice + slope su top-N livelli book."},
            {"name": "get_basis_term_structure", "description": "Basis curve futures dated vs spot, annualizzato."},
            {"name": "place_order", "description": "Invia ordine (CORE only)."},
            {"name": "place_combo_order", "description": "Multi-leg atomico via place_batch_order (solo category=option)."},
            {"name": "amend_order", "description": "Modifica ordine esistente."},
            {"name": "cancel_order", "description": "Cancella ordine."},
            {"name": "cancel_all_orders", "description": "Cancella tutti ordini."},
            {"name": "set_stop_loss", "description": "Setta stop loss su posizione."},
            {"name": "set_take_profit", "description": "Setta take profit su posizione."},
            {"name": "close_position", "description": "Chiude posizione aperta."},
            {"name": "set_leverage", "description": "Leva buy+sell uniforme."},
            {"name": "switch_position_mode", "description": "Hedge vs one-way."},
            {"name": "transfer_asset", "description": "Trasferimento interno tra account types."},
        ],
    )
    return app
-21
View File
@@ -1,21 +0,0 @@
from __future__ import annotations
from unittest.mock import MagicMock
import pytest
from mcp_bybit.client import BybitClient
@pytest.fixture
def mock_http():
    # Stand-in for pybit's HTTP session; each test sets return_value /
    # side_effect on the methods it exercises.
    return MagicMock(name="pybit_HTTP")
@pytest.fixture
def client(mock_http):
    # BybitClient wired to the mocked transport; credentials are dummies
    # and testnet=True so no real endpoint is ever touched.
    return BybitClient(
        api_key="test_key",
        api_secret="test_secret",
        testnet=True,
        http=mock_http,
    )
-588
View File
@@ -1,588 +0,0 @@
from __future__ import annotations
import pytest
from mcp_bybit.client import BybitClient
# Construction: attributes are stored, and a default pybit HTTP session is
# built with the given credentials when none is injected.
def test_client_init_stores_attrs(client, mock_http):
    assert client.testnet is True
    assert client._http is mock_http
def test_client_init_default_http(monkeypatch):
    created = {}
    class FakeHTTP:
        def __init__(self, **kwargs):
            created.update(kwargs)
    monkeypatch.setattr("mcp_bybit.client.HTTP", FakeHTTP)
    BybitClient(api_key="k", api_secret="s", testnet=False)
    assert created["api_key"] == "k"
    assert created["api_secret"] == "s"
    assert created["testnet"] is False
# get_ticker: raw Bybit ticker payload is normalized to snake_case floats.
@pytest.mark.asyncio
async def test_get_ticker(client, mock_http):
    mock_http.get_tickers.return_value = {
        "retCode": 0,
        "result": {
            "list": [{
                "symbol": "BTCUSDT",
                "lastPrice": "60000",
                "markPrice": "60010",
                "bid1Price": "59995",
                "ask1Price": "60005",
                "volume24h": "1500.5",
                "turnover24h": "90000000",
                "fundingRate": "0.0001",
                "openInterest": "50000",
            }]
        },
    }
    t = await client.get_ticker("BTCUSDT", category="linear")
    mock_http.get_tickers.assert_called_once_with(category="linear", symbol="BTCUSDT")
    assert t["symbol"] == "BTCUSDT"
    assert t["last_price"] == 60000.0
    assert t["mark_price"] == 60010.0
    assert t["bid"] == 59995.0
    assert t["ask"] == 60005.0
    assert t["volume_24h"] == 1500.5
    assert t["funding_rate"] == 0.0001
    assert t["open_interest"] == 50000.0
# get_ticker_batch: one upstream call per symbol, keyed result dict.
@pytest.mark.asyncio
async def test_get_ticker_batch(client, mock_http):
    def side_effect(**kwargs):
        symbol = kwargs["symbol"]
        return {"retCode": 0, "result": {"list": [{
            "symbol": symbol, "lastPrice": "1", "markPrice": "1",
            "bid1Price": "1", "ask1Price": "1", "volume24h": "0",
            "turnover24h": "0", "fundingRate": "0", "openInterest": "0",
        }]}}
    mock_http.get_tickers.side_effect = side_effect
    out = await client.get_ticker_batch(["BTCUSDT", "ETHUSDT"], category="linear")
    assert set(out.keys()) == {"BTCUSDT", "ETHUSDT"}
    assert mock_http.get_tickers.call_count == 2
# Unknown symbol yields an explicit not_found error instead of raising.
@pytest.mark.asyncio
async def test_get_ticker_not_found(client, mock_http):
    mock_http.get_tickers.return_value = {"retCode": 0, "result": {"list": []}}
    t = await client.get_ticker("UNKNOWNUSDT", category="linear")
    assert t == {"symbol": "UNKNOWNUSDT", "error": "not_found"}
# _f/_i parse helpers: empty string and None map to None, not exceptions.
def test_parse_helpers():
    from mcp_bybit.client import _f, _i
    assert _f("1.5") == 1.5
    assert _f("") is None
    assert _f(None) is None
    assert _i("42") == 42
    assert _i("") is None
    assert _i(None) is None
# get_orderbook: Bybit's compact b/a arrays become float [price, size] pairs.
@pytest.mark.asyncio
async def test_get_orderbook(client, mock_http):
    mock_http.get_orderbook.return_value = {
        "retCode": 0,
        "result": {
            "s": "BTCUSDT",
            "b": [["59990", "0.5"], ["59980", "1.0"]],
            "a": [["60010", "0.3"], ["60020", "0.7"]],
            "ts": 1700000000000,
        },
    }
    ob = await client.get_orderbook("BTCUSDT", category="linear", limit=25)
    mock_http.get_orderbook.assert_called_once_with(
        category="linear", symbol="BTCUSDT", limit=25
    )
    assert ob["symbol"] == "BTCUSDT"
    assert ob["bids"] == [[59990.0, 0.5], [59980.0, 1.0]]
    assert ob["asks"] == [[60010.0, 0.3], [60020.0, 0.7]]
    assert ob["timestamp"] == 1700000000000
# get_historical: kline rows become typed OHLCV candle dicts.
@pytest.mark.asyncio
async def test_get_historical(client, mock_http):
    mock_http.get_kline.return_value = {
        "retCode": 0,
        "result": {
            "list": [
                ["1700000000000", "60000", "60500", "59500", "60200", "100", "6020000"],
                ["1700003600000", "60200", "60700", "60000", "60400", "80", "4832000"],
            ]
        },
    }
    out = await client.get_historical(
        "BTCUSDT", category="linear", interval="60",
        start=1700000000000, end=1700003600000,
    )
    mock_http.get_kline.assert_called_once_with(
        category="linear", symbol="BTCUSDT", interval="60",
        start=1700000000000, end=1700003600000, limit=1000,
    )
    assert len(out["candles"]) == 2
    c0 = out["candles"][0]
    assert c0["timestamp"] == 1700000000000
    assert c0["open"] == 60000.0
    assert c0["high"] == 60500.0
    assert c0["low"] == 59500.0
    assert c0["close"] == 60200.0
    assert c0["volume"] == 100.0
# get_indicators: 35 synthetic candles are enough history for every indicator
# (RSI/ATR/MACD/ADX) to produce a non-None value.
@pytest.mark.asyncio
async def test_get_indicators(client, mock_http):
    rows = [
        [str(1700000000000 + i * 3600_000),
         str(60000 + i * 10), str(60000 + i * 10 + 5),
         str(60000 + i * 10 - 5), str(60000 + i * 10 + 2),
         "100", "6000000"]
        for i in range(35)
    ]
    mock_http.get_kline.return_value = {"retCode": 0, "result": {"list": rows}}
    out = await client.get_indicators(
        "BTCUSDT", category="linear",
        indicators=["rsi", "atr", "macd", "adx"],
        interval="60",
    )
    assert "rsi" in out and out["rsi"] is not None
    assert "atr" in out and out["atr"] is not None
    assert "macd" in out and out["macd"]["macd"] is not None
    assert "adx" in out and out["adx"]["adx"] is not None
# get_funding_rate: reads funding fields off the ticker payload.
@pytest.mark.asyncio
async def test_get_funding_rate(client, mock_http):
    mock_http.get_tickers.return_value = {
        "retCode": 0,
        "result": {"list": [{
            "symbol": "BTCUSDT", "fundingRate": "0.0001",
            "nextFundingTime": "1700003600000",
            "lastPrice": "60000", "markPrice": "60000",
            "bid1Price": "0", "ask1Price": "0",
            "volume24h": "0", "turnover24h": "0", "openInterest": "0",
        }]},
    }
    out = await client.get_funding_rate("BTCUSDT", category="linear")
    assert out["symbol"] == "BTCUSDT"
    assert out["funding_rate"] == 0.0001
    assert out["next_funding_time"] == 1700003600000
# get_funding_history: list normalized to {rate, ...} entries.
@pytest.mark.asyncio
async def test_get_funding_history(client, mock_http):
    mock_http.get_funding_rate_history.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "fundingRate": "0.0001", "fundingRateTimestamp": "1700000000000"},
            {"symbol": "BTCUSDT", "fundingRate": "0.00008", "fundingRateTimestamp": "1699996400000"},
        ]},
    }
    out = await client.get_funding_history("BTCUSDT", category="linear", limit=50)
    mock_http.get_funding_rate_history.assert_called_once_with(
        category="linear", symbol="BTCUSDT", limit=50
    )
    assert len(out["history"]) == 2
    assert out["history"][0]["rate"] == 0.0001
# get_open_interest: points plus current_oi taken from the newest row.
@pytest.mark.asyncio
async def test_get_open_interest(client, mock_http):
    mock_http.get_open_interest.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"openInterest": "50000", "timestamp": "1700000000000"},
            {"openInterest": "49000", "timestamp": "1699996400000"},
        ]},
    }
    out = await client.get_open_interest("BTCUSDT", category="linear", interval="5min", limit=100)
    mock_http.get_open_interest.assert_called_once_with(
        category="linear", symbol="BTCUSDT", intervalTime="5min", limit=100
    )
    assert len(out["points"]) == 2
    assert out["current_oi"] == 50000.0
# get_instruments: tick size / qty step pulled out of the nested filters.
@pytest.mark.asyncio
async def test_get_instruments(client, mock_http):
    mock_http.get_instruments_info.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "status": "Trading", "baseCoin": "BTC",
             "quoteCoin": "USDT", "priceFilter": {"tickSize": "0.1"},
             "lotSizeFilter": {"qtyStep": "0.001", "minOrderQty": "0.001"}},
        ]},
    }
    out = await client.get_instruments(category="linear")
    mock_http.get_instruments_info.assert_called_once_with(category="linear")
    assert len(out["instruments"]) == 1
    inst = out["instruments"][0]
    assert inst["symbol"] == "BTCUSDT"
    assert inst["tick_size"] == 0.1
    assert inst["qty_step"] == 0.001
# get_option_chain: queried via instruments_info on category=option.
@pytest.mark.asyncio
async def test_get_option_chain(client, mock_http):
    mock_http.get_instruments_info.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTC-30JUN25-50000-C", "baseCoin": "BTC",
             "settleCoin": "USDC", "optionsType": "Call",
             "launchTime": "1700000000000", "deliveryTime": "1719734400000"},
            {"symbol": "BTC-30JUN25-50000-P", "baseCoin": "BTC",
             "settleCoin": "USDC", "optionsType": "Put",
             "launchTime": "1700000000000", "deliveryTime": "1719734400000"},
        ]},
    }
    out = await client.get_option_chain(base_coin="BTC")
    mock_http.get_instruments_info.assert_called_once_with(category="option", baseCoin="BTC")
    assert len(out["options"]) == 2
    assert out["options"][0]["type"] == "Call"
# get_positions: linear positions are fetched with settleCoin=USDT and
# normalized to snake_case numeric fields.
@pytest.mark.asyncio
async def test_get_positions(client, mock_http):
    mock_http.get_positions.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "side": "Buy", "size": "0.1",
             "avgPrice": "60000", "unrealisedPnl": "50",
             "leverage": "10", "liqPrice": "50000", "positionValue": "6000"},
        ]},
    }
    out = await client.get_positions(category="linear")
    mock_http.get_positions.assert_called_once_with(category="linear", settleCoin="USDT")
    assert len(out) == 1
    p = out[0]
    assert p["symbol"] == "BTCUSDT"
    assert p["side"] == "Buy"
    assert p["size"] == 0.1
    assert p["entry_price"] == 60000.0
    assert p["liquidation_price"] == 50000.0
# get_account_summary: UNIFIED wallet totals plus per-coin breakdown.
@pytest.mark.asyncio
async def test_get_account_summary(client, mock_http):
    mock_http.get_wallet_balance.return_value = {
        "retCode": 0,
        "result": {"list": [{
            "accountType": "UNIFIED",
            "totalEquity": "10000",
            "totalWalletBalance": "9500",
            "totalMarginBalance": "9800",
            "totalAvailableBalance": "9000",
            "totalPerpUPL": "200",
            "coin": [
                {"coin": "USDT", "walletBalance": "9500", "equity": "9700"}
            ],
        }]},
    }
    out = await client.get_account_summary()
    mock_http.get_wallet_balance.assert_called_once_with(accountType="UNIFIED")
    assert out["equity"] == 10000.0
    assert out["available_balance"] == 9000.0
    assert out["unrealized_pnl"] == 200.0
    assert len(out["coins"]) == 1
    assert out["coins"][0]["coin"] == "USDT"
# get_trade_history: executions normalized to size/price floats.
@pytest.mark.asyncio
async def test_get_trade_history(client, mock_http):
    mock_http.get_executions.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "side": "Buy", "execQty": "0.01",
             "execPrice": "60000", "execFee": "0.1",
             "execTime": "1700000000000", "orderId": "abc"},
        ]},
    }
    out = await client.get_trade_history(category="linear", limit=50)
    mock_http.get_executions.assert_called_once_with(category="linear", limit=50)
    assert len(out) == 1
    assert out[0]["symbol"] == "BTCUSDT"
    assert out[0]["size"] == 0.01
    assert out[0]["price"] == 60000.0
# get_open_orders: pending orders fetched with settleCoin=USDT.
@pytest.mark.asyncio
async def test_get_open_orders(client, mock_http):
    mock_http.get_open_orders.return_value = {
        "retCode": 0,
        "result": {"list": [
            {"symbol": "BTCUSDT", "orderId": "o1", "side": "Buy",
             "qty": "0.1", "price": "59000", "orderType": "Limit",
             "orderStatus": "New", "reduceOnly": False},
        ]},
    }
    out = await client.get_open_orders(category="linear")
    mock_http.get_open_orders.assert_called_once_with(category="linear", settleCoin="USDT")
    assert len(out) == 1
    assert out[0]["order_id"] == "o1"
    assert out[0]["price"] == 59000.0
# get_basis_spot_perp: spot vs perp tickers combined into abs and pct basis
# (60120 vs 60000 -> +120 abs, +0.2%).
@pytest.mark.asyncio
async def test_get_basis_spot_perp(client, mock_http):
    def side(**kwargs):
        if kwargs["category"] == "spot":
            return {"retCode": 0, "result": {"list": [{
                "symbol": "BTCUSDT", "lastPrice": "60000", "markPrice": "60000",
                "bid1Price": "59995", "ask1Price": "60005",
                "volume24h": "0", "turnover24h": "0",
                "fundingRate": "0", "openInterest": "0",
            }]}}
        else:
            return {"retCode": 0, "result": {"list": [{
                "symbol": "BTCUSDT", "lastPrice": "60120", "markPrice": "60120",
                "bid1Price": "60115", "ask1Price": "60125",
                "volume24h": "0", "turnover24h": "0",
                "fundingRate": "0.0001", "openInterest": "0",
            }]}}
    mock_http.get_tickers.side_effect = side
    out = await client.get_basis_spot_perp("BTC")
    assert out["asset"] == "BTC"
    assert out["spot_price"] == 60000.0
    assert out["perp_price"] == 60120.0
    assert out["basis_abs"] == 120.0
    assert round(out["basis_pct"], 3) == 0.2
@pytest.mark.asyncio
async def test_place_order_limit(client, mock_http):
mock_http.place_order.return_value = {
"retCode": 0,
"result": {"orderId": "ord123", "orderLinkId": ""},
}
out = await client.place_order(
category="linear", symbol="BTCUSDT", side="Buy",
qty=0.01, order_type="Limit", price=60000.0, tif="GTC",
)
assert out["order_id"] == "ord123"
kwargs = mock_http.place_order.call_args.kwargs
assert kwargs["category"] == "linear"
assert kwargs["symbol"] == "BTCUSDT"
assert kwargs["side"] == "Buy"
assert kwargs["qty"] == "0.01"
assert kwargs["orderType"] == "Limit"
assert kwargs["price"] == "60000.0"
assert kwargs["timeInForce"] == "GTC"
@pytest.mark.asyncio
async def test_place_order_error(client, mock_http):
mock_http.place_order.return_value = {"retCode": 10001, "retMsg": "insufficient balance"}
out = await client.place_order(
category="linear", symbol="BTCUSDT", side="Buy", qty=0.01, order_type="Market"
)
assert out.get("error") == "insufficient balance"
assert out.get("code") == 10001
@pytest.mark.asyncio
async def test_amend_order(client, mock_http):
mock_http.amend_order.return_value = {"retCode": 0, "result": {"orderId": "ord1"}}
out = await client.amend_order(
category="linear", symbol="BTCUSDT", order_id="ord1", new_qty=0.02
)
assert out["order_id"] == "ord1"
kwargs = mock_http.amend_order.call_args.kwargs
assert kwargs["orderId"] == "ord1"
assert kwargs["qty"] == "0.02"
assert "price" not in kwargs
@pytest.mark.asyncio
async def test_place_order_option_adds_link_id(client, mock_http):
mock_http.place_order.return_value = {
"retCode": 0,
"result": {"orderId": "opt1", "orderLinkId": "cerbero-abc"},
}
await client.place_order(
category="option", symbol="BTC-24APR26-96000-C-USDT",
side="Buy", qty=0.01, order_type="Limit", price=5.0,
)
kwargs = mock_http.place_order.call_args.kwargs
assert "orderLinkId" in kwargs
assert kwargs["orderLinkId"].startswith("cerbero-")
@pytest.mark.asyncio
async def test_place_order_linear_no_link_id(client, mock_http):
mock_http.place_order.return_value = {"retCode": 0, "result": {"orderId": "x"}}
await client.place_order(
category="linear", symbol="BTCUSDT", side="Buy", qty=0.01, order_type="Market"
)
kwargs = mock_http.place_order.call_args.kwargs
assert "orderLinkId" not in kwargs
@pytest.mark.asyncio
async def test_place_combo_order_batch_option(client, mock_http):
"""Combo order via place_batch_order su category=option (atomic, 1 round-trip)."""
mock_http.place_batch_order.return_value = {
"retCode": 0,
"result": {
"list": [
{"orderId": "ord-1", "orderLinkId": "cerbero-leg1"},
{"orderId": "ord-2", "orderLinkId": "cerbero-leg2"},
]
},
}
legs = [
{"symbol": "BTC-30APR26-75000-C-USDT", "side": "Buy", "qty": 0.01, "order_type": "Limit", "price": 5.0},
{"symbol": "BTC-30APR26-80000-C-USDT", "side": "Sell", "qty": 0.01, "order_type": "Limit", "price": 3.0},
]
out = await client.place_combo_order(category="option", legs=legs)
assert len(out["orders"]) == 2
assert out["orders"][0]["order_id"] == "ord-1"
kwargs = mock_http.place_batch_order.call_args.kwargs
assert kwargs["category"] == "option"
request = kwargs["request"]
assert len(request) == 2
assert request[0]["symbol"] == "BTC-30APR26-75000-C-USDT"
assert request[0]["qty"] == "0.01"
assert request[0]["orderType"] == "Limit"
# CER: orderLinkId obbligatorio per option
assert "orderLinkId" in request[0]
@pytest.mark.asyncio
async def test_place_combo_order_error(client, mock_http):
    """A non-zero retCode from the batch endpoint is surfaced as error + code."""
    mock_http.place_batch_order.return_value = {"retCode": 10001, "retMsg": "invalid leg"}
    out = await client.place_combo_order(
        category="option",
        legs=[
            {"symbol": "X", "side": "Buy", "qty": 1, "order_type": "Limit", "price": 1.0},
            {"symbol": "Y", "side": "Sell", "qty": 1, "order_type": "Limit", "price": 1.0},
        ],
    )
    assert out["error"] == "invalid leg"
    assert out["code"] == 10001
@pytest.mark.asyncio
async def test_place_combo_order_rejects_non_option(client, mock_http):
    """Bybit batch_order is only available for the option category.

    Any other category must raise ValueError (message mentioning "option").
    """
    # pytest is already in module scope (see the marker decorator above);
    # the original local `import pytest as _pytest` alias was redundant.
    with pytest.raises(ValueError, match="option"):
        await client.place_combo_order(
            category="linear",
            legs=[
                {"symbol": "BTCUSDT", "side": "Buy", "qty": 0.01, "order_type": "Market"},
                {"symbol": "ETHUSDT", "side": "Sell", "qty": 0.01, "order_type": "Market"},
            ],
        )
@pytest.mark.asyncio
async def test_cancel_order(client, mock_http):
    """cancel_order forwards the id as camelCase and reports status=cancelled."""
    mock_http.cancel_order.return_value = {"retCode": 0, "result": {"orderId": "ord1"}}
    out = await client.cancel_order(category="linear", symbol="BTCUSDT", order_id="ord1")
    mock_http.cancel_order.assert_called_once_with(
        category="linear", symbol="BTCUSDT", orderId="ord1"
    )
    assert out["order_id"] == "ord1"
    assert out["status"] == "cancelled"
@pytest.mark.asyncio
async def test_cancel_all_orders(client, mock_http):
    """cancel_all_orders collects the cancelled order ids from the result list."""
    mock_http.cancel_all_orders.return_value = {
        "retCode": 0,
        "result": {"list": [{"orderId": "o1"}, {"orderId": "o2"}]},
    }
    out = await client.cancel_all_orders(category="linear", symbol="BTCUSDT")
    mock_http.cancel_all_orders.assert_called_once_with(
        category="linear", symbol="BTCUSDT"
    )
    assert out["cancelled_ids"] == ["o1", "o2"]
@pytest.mark.asyncio
async def test_set_stop_loss(client, mock_http):
    """set_stop_loss maps to set_trading_stop with a string-encoded price."""
    mock_http.set_trading_stop.return_value = {"retCode": 0, "result": {}}
    out = await client.set_stop_loss(
        category="linear", symbol="BTCUSDT", stop_loss=55000.0
    )
    mock_http.set_trading_stop.assert_called_once()
    kwargs = mock_http.set_trading_stop.call_args.kwargs
    assert kwargs["category"] == "linear"
    assert kwargs["symbol"] == "BTCUSDT"
    assert kwargs["stopLoss"] == "55000.0"
    # positionIdx may be omitted; when present it must be 0 (one-way mode).
    assert kwargs.get("positionIdx", 0) == 0
    assert out["status"] == "stop_loss_set"
@pytest.mark.asyncio
async def test_set_take_profit(client, mock_http):
    """set_take_profit maps to set_trading_stop with a string-encoded price."""
    mock_http.set_trading_stop.return_value = {"retCode": 0, "result": {}}
    out = await client.set_take_profit(
        category="linear", symbol="BTCUSDT", take_profit=65000.0
    )
    kwargs = mock_http.set_trading_stop.call_args.kwargs
    assert kwargs["takeProfit"] == "65000.0"
    assert out["status"] == "take_profit_set"
@pytest.mark.asyncio
async def test_close_position(client, mock_http):
    """Closing a long submits a reduce-only market order on the opposite side."""
    mock_http.get_positions.return_value = {
        "retCode": 0, "result": {"list": [
            {"symbol": "BTCUSDT", "side": "Buy", "size": "0.1",
             "avgPrice": "60000", "unrealisedPnl": "0",
             "leverage": "10", "liqPrice": "0", "positionValue": "6000"},
        ]},
    }
    mock_http.place_order.return_value = {
        "retCode": 0, "result": {"orderId": "closeord", "orderLinkId": ""},
    }
    out = await client.close_position(category="linear", symbol="BTCUSDT")
    assert out["status"] == "submitted"
    kwargs = mock_http.place_order.call_args.kwargs
    # A Buy position of size 0.1 is flattened with a Sell for the full size.
    assert kwargs["side"] == "Sell"
    assert kwargs["qty"] == "0.1"
    assert kwargs["reduceOnly"] is True
    assert kwargs["orderType"] == "Market"
@pytest.mark.asyncio
async def test_set_leverage(client, mock_http):
    """set_leverage sets buy and sell leverage symmetrically, as strings."""
    mock_http.set_leverage.return_value = {"retCode": 0, "result": {}}
    out = await client.set_leverage(category="linear", symbol="BTCUSDT", leverage=5)
    mock_http.set_leverage.assert_called_once_with(
        category="linear", symbol="BTCUSDT", buyLeverage="5", sellLeverage="5"
    )
    assert out["status"] == "leverage_set"
@pytest.mark.asyncio
async def test_switch_position_mode(client, mock_http):
    """'hedge' is translated to Bybit's numeric position mode 3."""
    mock_http.switch_position_mode.return_value = {"retCode": 0, "result": {}}
    out = await client.switch_position_mode(
        category="linear", symbol="BTCUSDT", mode="hedge"
    )
    kwargs = mock_http.switch_position_mode.call_args.kwargs
    assert kwargs["mode"] == 3
    assert out["status"] == "mode_switched"
@pytest.mark.asyncio
async def test_transfer_asset(client, mock_http):
    """Internal transfer maps snake_case args onto Bybit's camelCase fields."""
    mock_http.create_internal_transfer.return_value = {
        "retCode": 0, "result": {"transferId": "tx123"},
    }
    out = await client.transfer_asset(
        coin="USDT", amount=100.0, from_type="UNIFIED", to_type="FUND"
    )
    kwargs = mock_http.create_internal_transfer.call_args.kwargs
    assert kwargs["coin"] == "USDT"
    # Amounts are serialized as strings for the exchange API.
    assert kwargs["amount"] == "100.0"
    assert kwargs["fromAccountType"] == "UNIFIED"
    assert kwargs["toAccountType"] == "FUND"
    assert out["transfer_id"] == "tx123"
@@ -1,54 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
from fastapi.testclient import TestClient
from mcp_bybit.server import create_app
from mcp_common.auth import Principal, TokenStore
from mcp_common.environment import EnvironmentInfo
def _make_app(env_info, creds):
    """Build a test app around a MagicMock client with core/observer tokens."""
    c = MagicMock()
    c.testnet = True
    c.set_leverage = AsyncMock(return_value={"state": "ok"})
    store = TokenStore(tokens={
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    })
    return create_app(client=c, token_store=store, creds=creds, env_info=env_info)
def test_environment_info_full_shape():
    """environment_info echoes the EnvironmentInfo fields plus max_leverage."""
    env = EnvironmentInfo(
        exchange="bybit",
        environment="testnet",
        source="env",
        env_value="true",
        base_url="https://api-testnet.bybit.com",
    )
    app = _make_app(env, creds={"max_leverage": 3})
    c = TestClient(app)
    # Observer token is enough: environment_info is a read tool.
    r = c.post(
        "/tools/environment_info",
        headers={"Authorization": "Bearer ot"},
    )
    assert r.status_code == 200
    body = r.json()
    assert body["exchange"] == "bybit"
    assert body["environment"] == "testnet"
    assert body["source"] == "env"
    assert body["env_value"] == "true"
    assert body["base_url"] == "https://api-testnet.bybit.com"
    assert body["max_leverage"] == 3
def test_environment_info_requires_auth():
    """environment_info without a bearer token is rejected with 401."""
    env = EnvironmentInfo(
        exchange="bybit", environment="testnet", source="default",
        env_value=None, base_url="https://api-testnet.bybit.com",
    )
    app = _make_app(env, creds={"max_leverage": 3})
    c = TestClient(app)
    r = c.post("/tools/environment_info")
    assert r.status_code == 401
@@ -1,50 +0,0 @@
from __future__ import annotations
import pytest
from fastapi import HTTPException
from mcp_bybit.leverage_cap import enforce_leverage, get_max_leverage
def test_get_max_leverage_returns_creds_value():
    """The cap comes straight from the secret's max_leverage field."""
    assert get_max_leverage({"max_leverage": 5}) == 5
def test_get_max_leverage_default_when_missing():
    """Default is 1 (cash) when the secret has no max_leverage field."""
    assert get_max_leverage({}) == 1
def test_enforce_leverage_pass_under_cap():
    """A request strictly below the cap must not raise."""
    enforce_leverage(2, creds={"max_leverage": 3}, exchange="bybit")
def test_enforce_leverage_pass_at_cap():
    """A request exactly at the cap must not raise."""
    enforce_leverage(3, creds={"max_leverage": 3}, exchange="bybit")
def test_enforce_leverage_reject_over_cap():
    """Requests above the cap raise 403 with a structured detail payload."""
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(10, creds={"max_leverage": 3}, exchange="bybit")
    detail = exc.value.detail
    assert exc.value.status_code == 403
    assert detail["error"] == "LEVERAGE_CAP_EXCEEDED"
    assert detail["exchange"] == "bybit"
    assert detail["requested"] == 10
    assert detail["max"] == 3
def test_enforce_leverage_reject_when_below_one():
    """Leverage below 1 is rejected with 403 even though it is under the cap."""
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(0, creds={"max_leverage": 3}, exchange="bybit")
    assert exc.value.status_code == 403
def test_enforce_leverage_default_when_none():
    """When requested is None, the cap itself is applied as the default."""
    assert enforce_leverage(None, creds={"max_leverage": 3}, exchange="bybit") == 3
-150
View File
@@ -1,150 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from fastapi.testclient import TestClient
from mcp_bybit.server import create_app
from mcp_common.auth import Principal, TokenStore
@pytest.fixture
def token_store():
    """TokenStore with one core (read+write) and one observer (read-only) token."""
    return TokenStore(
        tokens={
            "core-tok": Principal("core", {"core"}),
            "obs-tok": Principal("observer", {"observer"}),
        }
    )
@pytest.fixture
def mock_client():
    """MagicMock exchange client: every tool method is an AsyncMock returning
    a canned payload, so endpoint tests exercise only routing and ACLs."""
    canned_returns = {
        "get_ticker": {"symbol": "BTCUSDT"},
        "get_ticker_batch": {"BTCUSDT": {}},
        "get_orderbook": {"bids": [], "asks": []},
        "get_historical": {"candles": []},
        "get_indicators": {"rsi": 50.0},
        "get_funding_rate": {"funding_rate": 0.0001},
        "get_funding_history": {"history": []},
        "get_open_interest": {"points": []},
        "get_instruments": {"instruments": []},
        "get_option_chain": {"options": []},
        "get_positions": [],
        "get_account_summary": {"equity": 0},
        "get_trade_history": [],
        "get_open_orders": [],
        "get_basis_spot_perp": {"basis_pct": 0},
        "place_order": {"order_id": "x"},
        "amend_order": {"order_id": "x"},
        "cancel_order": {"status": "cancelled"},
        "cancel_all_orders": {"cancelled_ids": []},
        "set_stop_loss": {"status": "stop_loss_set"},
        "set_take_profit": {"status": "take_profit_set"},
        "close_position": {"status": "submitted"},
        "set_leverage": {"status": "leverage_set"},
        "switch_position_mode": {"status": "mode_switched"},
        "transfer_asset": {"transfer_id": "tx"},
        "place_combo_order": {"orders": [{"order_id": "ord-1"}, {"order_id": "ord-2"}]},
        "get_orderbook_imbalance": {"imbalance_ratio": 0.0, "microprice": 100.0},
        "get_basis_term_structure": {"asset": "BTC", "term_structure": []},
    }
    mock = MagicMock()
    for method_name, payload in canned_returns.items():
        setattr(mock, method_name, AsyncMock(return_value=payload))
    return mock
@pytest.fixture
def http(mock_client, token_store):
    """FastAPI TestClient over the app built from the mock client and tokens."""
    app = create_app(client=mock_client, token_store=token_store, creds={"max_leverage": 5})
    return TestClient(app)
# Authorization headers for the two test principals defined in token_store.
CORE = {"Authorization": "Bearer core-tok"}
OBS = {"Authorization": "Bearer obs-tok"}
# (path, minimal valid payload) pairs for every read-only tool endpoint.
READ_ENDPOINTS = [
    ("/tools/get_ticker", {"symbol": "BTCUSDT"}),
    ("/tools/get_ticker_batch", {"symbols": ["BTCUSDT"]}),
    ("/tools/get_orderbook", {"symbol": "BTCUSDT"}),
    ("/tools/get_historical", {"symbol": "BTCUSDT"}),
    ("/tools/get_indicators", {"symbol": "BTCUSDT"}),
    ("/tools/get_funding_rate", {"symbol": "BTCUSDT"}),
    ("/tools/get_funding_history", {"symbol": "BTCUSDT"}),
    ("/tools/get_open_interest", {"symbol": "BTCUSDT"}),
    ("/tools/get_instruments", {}),
    ("/tools/get_option_chain", {"base_coin": "BTC"}),
    ("/tools/get_positions", {}),
    ("/tools/get_account_summary", {}),
    ("/tools/get_trade_history", {}),
    ("/tools/get_open_orders", {}),
    ("/tools/get_basis_spot_perp", {"asset": "BTC"}),
    ("/tools/get_orderbook_imbalance", {"symbol": "BTCUSDT"}),
    ("/tools/get_basis_term_structure", {"asset": "BTC"}),
]
# (path, minimal valid payload) pairs for every write tool endpoint
# (core-only; observers must receive 403).
WRITE_ENDPOINTS = [
    ("/tools/place_order", {"category": "linear", "symbol": "BTCUSDT", "side": "Buy", "qty": 0.01}),
    ("/tools/amend_order", {"category": "linear", "symbol": "BTCUSDT", "order_id": "o1"}),
    ("/tools/cancel_order", {"category": "linear", "symbol": "BTCUSDT", "order_id": "o1"}),
    ("/tools/cancel_all_orders", {"category": "linear"}),
    ("/tools/set_stop_loss", {"category": "linear", "symbol": "BTCUSDT", "stop_loss": 55000}),
    ("/tools/set_take_profit", {"category": "linear", "symbol": "BTCUSDT", "take_profit": 65000}),
    ("/tools/close_position", {"category": "linear", "symbol": "BTCUSDT"}),
    ("/tools/set_leverage", {"category": "linear", "symbol": "BTCUSDT", "leverage": 5}),
    ("/tools/switch_position_mode", {"category": "linear", "symbol": "BTCUSDT", "mode": "hedge"}),
    ("/tools/transfer_asset", {"coin": "USDT", "amount": 10.0, "from_type": "UNIFIED", "to_type": "FUND"}),
    ("/tools/place_combo_order", {
        "category": "option",
        "legs": [
            {"symbol": "BTC-30APR26-75000-C-USDT", "side": "Buy", "qty": 0.01, "order_type": "Limit", "price": 5.0},
            {"symbol": "BTC-30APR26-80000-C-USDT", "side": "Sell", "qty": 0.01, "order_type": "Limit", "price": 3.0},
        ],
    }),
]
def test_place_combo_order_min_legs(http):
    """A single-leg combo fails request validation (min 2 legs) with 422."""
    r = http.post(
        "/tools/place_combo_order",
        json={
            "category": "option",
            "legs": [{"symbol": "X", "side": "Buy", "qty": 1, "order_type": "Limit", "price": 1.0}],
        },
        headers=CORE,
    )
    assert r.status_code == 422
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_core_ok(http, path, payload):
    """Every read tool succeeds for the core principal."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_observer_ok(http, path, payload):
    """Every read tool also succeeds for the read-only observer principal."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", READ_ENDPOINTS)
def test_read_no_auth_401(http, path, payload):
    """Reads without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_core_ok(http, path, payload):
    """Every write tool succeeds for the core principal."""
    r = http.post(path, json=payload, headers=CORE)
    assert r.status_code == 200, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_observer_403(http, path, payload):
    """Writes are forbidden (403) for the observer principal."""
    r = http.post(path, json=payload, headers=OBS)
    assert r.status_code == 403, (path, r.text)
@pytest.mark.parametrize("path,payload", WRITE_ENDPOINTS)
def test_write_no_auth_401(http, path, payload):
    """Writes without a bearer token are rejected with 401."""
    r = http.post(path, json=payload)
    assert r.status_code == 401, (path, r.text)
-27
View File
@@ -1,27 +0,0 @@
# Packaging metadata for the mcp-deribit service (uv workspace member).
[project]
name = "mcp-deribit"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
"mcp-common",
"fastapi>=0.115",
"uvicorn[standard]>=0.30",
"httpx>=0.27",
"pydantic>=2.6",
]
# Test-only extras (`.[dev]`).
[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_deribit"]
# Resolve mcp-common from the local uv workspace instead of an index.
[tool.uv.sources]
mcp-common = { workspace = true }
# Console entry point: `mcp-deribit` starts the service.
[project.scripts]
mcp-deribit = "mcp_deribit.__main__:main"
@@ -1,30 +0,0 @@
from __future__ import annotations
from mcp_common.app_factory import ExchangeAppSpec, run_exchange_main
from mcp_deribit.client import DeribitClient
from mcp_deribit.server import create_app
# ExchangeAppSpec wires credentials, testnet/live environment detection and
# the client/app builders into the shared run_exchange_main entry point.
SPEC = ExchangeAppSpec(
    exchange="deribit",
    creds_env_var="CREDENTIALS_FILE",
    env_var="DERIBIT_TESTNET",
    flag_key="testnet",
    default_base_url_live="https://www.deribit.com/api/v2",
    default_base_url_testnet="https://test.deribit.com/api/v2",
    default_port=9011,
    build_client=lambda creds, env_info: DeribitClient(
        client_id=creds["client_id"],
        client_secret=creds["client_secret"],
        testnet=(env_info.environment == "testnet"),
    ),
    build_app=create_app,
)
def main():
    """Console-script entry point: boot the mcp-deribit service from SPEC."""
    run_exchange_main(SPEC)
if __name__ == "__main__":
    main()
File diff suppressed because it is too large Load Diff
@@ -1,18 +0,0 @@
"""Re-export shim per backward-compat: la logica vive ora in
mcp_common.env_validation. Non aggiungere nuovo codice qui.
"""
from mcp_common.env_validation import (
MissingEnvError,
fail_fast_if_missing,
optional_env,
require_env,
summarize,
)
__all__ = [
"MissingEnvError",
"fail_fast_if_missing",
"optional_env",
"require_env",
"summarize",
]
@@ -1,56 +0,0 @@
"""Leverage cap server-side per place_order.
Cap letto dal secret JSON via campo `max_leverage`. Default 1 (cash) se assente.
"""
from __future__ import annotations
from fastapi import HTTPException
def get_max_leverage(creds: dict) -> int:
    """Read ``max_leverage`` from the credentials secret.

    Falls back to 1 (cash, no leverage) when the key is missing or its value
    cannot be coerced to ``int``; the result is never below 1.
    """
    try:
        cap = int(creds.get("max_leverage", 1))
    except (TypeError, ValueError):
        cap = 1
    return cap if cap > 1 else 1
def enforce_leverage(
    requested: int | float | None,
    *,
    creds: dict,
    exchange: str,
) -> int:
    """Validate a requested leverage against the account cap.

    Returns the leverage to apply. When *requested* is None, the cap itself
    is used as the default. Raises HTTPException(403, LEVERAGE_CAP_EXCEEDED)
    when the request is below 1 or exceeds the cap.
    """
    cap = get_max_leverage(creds)
    if requested is None:
        return cap
    lev = int(requested)

    def _rejection(**extra) -> HTTPException:
        # Structured detail consumed by callers/audit; keep key order stable.
        detail = {
            "error": "LEVERAGE_CAP_EXCEEDED",
            "exchange": exchange,
            "requested": lev,
            "max": cap,
        }
        detail.update(extra)
        return HTTPException(status_code=403, detail=detail)

    if lev < 1:
        raise _rejection(reason="leverage must be >= 1")
    if lev > cap:
        raise _rejection()
    return lev
@@ -1,695 +0,0 @@
from __future__ import annotations
import contextlib
import os
from fastapi import Depends, FastAPI, HTTPException
from mcp_common.audit import audit_write_op
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.environment import EnvironmentInfo
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel, field_validator, model_validator
from mcp_deribit.client import DeribitClient
from mcp_deribit.leverage_cap import enforce_leverage as _enforce_leverage
from mcp_deribit.leverage_cap import get_max_leverage
# --- Body models ---
class GetTickerReq(BaseModel):
    """Ticker request; accepts `instrument_name` or the alias `instrument`."""
    instrument_name: str | None = None
    instrument: str | None = None
    model_config = {"extra": "allow"}
    @model_validator(mode="after")
    def _normalize(self):
        # Coalesce the two spellings into instrument_name; reject when both missing.
        sym = self.instrument_name or self.instrument
        if not sym:
            raise ValueError("instrument_name (or instrument) is required")
        self.instrument_name = sym
        return self
class GetTickerBatchReq(BaseModel):
    """Batch ticker request; accepts `instrument_names` or alias `instruments`."""
    instrument_names: list[str] | None = None
    instruments: list[str] | None = None
    model_config = {"extra": "allow"}
    @model_validator(mode="after")
    def _normalize(self):
        # Coalesce the two spellings; an empty/missing list is rejected.
        names = self.instrument_names or self.instruments
        if not names:
            raise ValueError("instrument_names (or instruments) is required")
        self.instrument_names = names
        return self
class GetInstrumentsReq(BaseModel):
    """Instrument search: kind/expiry/strike/open-interest filters + pagination."""
    currency: str
    kind: str | None = None
    expiry_from: str | None = None
    expiry_to: str | None = None
    strike_min: float | None = None
    strike_max: float | None = None
    min_open_interest: float | None = None
    limit: int = 100
    offset: int = 0
class GetOrderbookReq(BaseModel):
    """Orderbook snapshot request for one instrument."""
    instrument_name: str
    depth: int = 10
class OrderbookImbalanceReq(BaseModel):
    """Orderbook imbalance/microprice request (same shape as GetOrderbookReq)."""
    instrument_name: str
    depth: int = 10
class GetPositionsReq(BaseModel):
    """Open-positions request, scoped by currency."""
    currency: str = "USDC"
class GetAccountSummaryReq(BaseModel):
    """Account summary request, scoped by currency."""
    currency: str = "USDC"
class GetTradeHistoryReq(BaseModel):
    """Trade history request with an optional instrument filter."""
    limit: int = 100
    instrument_name: str | None = None
class GetHistoricalReq(BaseModel):
    """Historical candles request over an explicit date range."""
    instrument: str
    start_date: str
    end_date: str
    resolution: str = "1h"
class GetDvolReq(BaseModel):
    """DVOL (implied-volatility index) series over an explicit date range."""
    currency: str = "BTC"
    start_date: str
    end_date: str
    resolution: str = "1D"
class GetDvolHistoryReq(BaseModel):
    """DVOL history expressed as a lookback in days instead of dates."""
    currency: str = "BTC"
    lookback_days: int = 90
class GetIvRankReq(BaseModel):
    """IV-rank request for a single instrument."""
    instrument: str
class GetRealizedVolReq(BaseModel):
    """Realized-volatility request over one or more rolling windows.

    Note: the mutable default is safe here — pydantic copies field defaults
    per instance.
    """
    currency: str = "BTC"
    windows: list[int] = [14, 30]
class GetGexReq(BaseModel):
    """Gamma-exposure request, optionally bounded by an expiry window."""
    currency: str
    expiry_from: str | None = None
    expiry_to: str | None = None
    top_n_strikes: int = 50
class OptionFlowReq(BaseModel):
    """Shared body for option-flow indicators (dealer gamma, vanna/charm,
    OI-weighted skew, smile asymmetry, ATM vs wings)."""
    currency: str
    expiry_from: str | None = None
    expiry_to: str | None = None
    top_n_strikes: int = 100
class GetPcRatioReq(BaseModel):
    """Put/call ratio request."""
    currency: str
class GetSkew25dReq(BaseModel):
    """25-delta skew request for one expiry."""
    currency: str
    expiry: str
class GetTermStructureReq(BaseModel):
    """Volatility term-structure request."""
    currency: str
class CalculateSpreadPayoffReq(BaseModel):
    """Payoff computation for a multi-leg spread; legs are free-form dicts."""
    legs: list[dict]
    quote_currency: str = "USD"
class RunBacktestReq(BaseModel):
    """Backtest request for a named strategy with optional entry/exit rules."""
    strategy_name: str
    underlying: str = "BTC"
    lookback_days: int = 30
    resolution: str = "4h"
    entry_rules: dict | None = None
    exit_rules: dict | None = None
class FindByDeltaReq(BaseModel):
    """Find options closest to a target delta, with liquidity floors."""
    currency: str
    expiry: str
    target_delta: float
    option_type: str
    max_results: int = 3
    min_open_interest: float = 100.0
    min_volume_24h: float = 20.0
min_volume_24h: float = 20.0
class GetIndicatorsReq(BaseModel):
instrument: str
indicators: list[str]
start_date: str
end_date: str
resolution: str = "1h"
@field_validator("indicators", mode="before")
@classmethod
def _coerce_indicators(cls, v):
if isinstance(v, str):
import json
s = v.strip()
if s.startswith("["):
try:
parsed = json.loads(s)
if isinstance(parsed, list):
return [str(x).strip() for x in parsed if str(x).strip()]
except json.JSONDecodeError:
pass
return [x.strip() for x in s.split(",") if x.strip()]
if isinstance(v, list):
return v
raise ValueError(
"indicators must be a list like ['rsi','atr','macd'] "
"or a comma-separated string like 'rsi,atr,macd'"
)
class PlaceOrderReq(BaseModel):
    """Single-leg order request."""
    instrument_name: str
    side: str  # "buy" | "sell"
    amount: float
    type: str = "limit"
    price: float | None = None
    reduce_only: bool = False
    post_only: bool = False
    label: str | None = None
    leverage: int | None = None  # CER-016: None → default cap (3x)
class ComboLeg(BaseModel):
    """One leg of a combo order."""
    instrument_name: str
    direction: str  # "buy" | "sell"
    ratio: int = 1
class PlaceComboOrderReq(BaseModel):
    """Multi-leg (combo) order request; requires at least two legs."""
    legs: list[ComboLeg]
    side: str  # "buy" | "sell"
    amount: float
    type: str = "limit"
    price: float | None = None
    label: str | None = None
    leverage: int | None = None
    @model_validator(mode="after")
    def _at_least_two_legs(self):
        # A "combo" with fewer than two legs is just a single order — reject.
        if len(self.legs) < 2:
            raise ValueError("combo requires at least 2 legs")
        return self
class CancelOrderReq(BaseModel):
    """Cancel one order by id."""
    order_id: str
class SetStopLossReq(BaseModel):
    """Stop-loss request: order id plus trigger price."""
    order_id: str
    stop_price: float
class SetTakeProfitReq(BaseModel):
    """Take-profit request: order id plus target price."""
    order_id: str
    tp_price: float
class ClosePositionReq(BaseModel):
    """Close the position on one instrument."""
    instrument_name: str
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
allowed: set[str] = set()
if core:
allowed.add("core")
if observer:
allowed.add("observer")
if not (principal.capabilities & allowed):
raise HTTPException(403, f"capability required: {allowed}")
# --- App factory ---
def create_app(
*,
client: DeribitClient,
token_store: TokenStore,
creds: dict,
env_info: EnvironmentInfo | None = None,
) -> FastAPI:
from contextlib import asynccontextmanager
cap_default = get_max_leverage(creds)
# CER-016: pre-set leverage cap su perp principali al boot (best-effort).
@asynccontextmanager
async def _lifespan(_app: FastAPI):
for inst in ("BTC-PERPETUAL", "ETH-PERPETUAL"):
with contextlib.suppress(Exception):
await client.set_leverage(inst, cap_default)
yield
app = build_app(
name="mcp-deribit",
version="0.1.0",
token_store=token_store,
lifespan=_lifespan,
)
# --- Read tools: core + observer ---
@app.post("/tools/is_testnet", tags=["reads"])
async def t_is_testnet(principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
return client.is_testnet()
@app.post("/tools/environment_info", tags=["reads"])
async def t_environment_info(principal: Principal = Depends(require_principal)):
_check(principal, core=True, observer=True)
if env_info is None:
return {
"exchange": "deribit",
"environment": "testnet" if client.is_testnet().get("testnet") else "mainnet",
"source": "credentials",
"env_value": None,
"base_url": client.base_url,
"max_leverage": get_max_leverage(creds),
}
return {
"exchange": env_info.exchange,
"environment": env_info.environment,
"source": env_info.source,
"env_value": env_info.env_value,
"base_url": env_info.base_url,
"max_leverage": get_max_leverage(creds),
}
@app.post("/tools/get_ticker", tags=["reads"])
async def t_get_ticker(
body: GetTickerReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_ticker(body.instrument_name)
@app.post("/tools/get_ticker_batch", tags=["reads"])
async def t_get_ticker_batch(
body: GetTickerBatchReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_ticker_batch(body.instrument_names)
@app.post("/tools/get_instruments", tags=["reads"])
async def t_get_instruments(
body: GetInstrumentsReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_instruments(
currency=body.currency,
kind=body.kind,
expiry_from=body.expiry_from,
expiry_to=body.expiry_to,
strike_min=body.strike_min,
strike_max=body.strike_max,
min_open_interest=body.min_open_interest,
limit=body.limit,
offset=body.offset,
)
@app.post("/tools/get_orderbook", tags=["reads"])
async def t_get_orderbook(
body: GetOrderbookReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_orderbook(body.instrument_name, body.depth)
@app.post("/tools/get_orderbook_imbalance", tags=["reads"])
async def t_get_ob_imbalance(
body: OrderbookImbalanceReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_orderbook_imbalance(body.instrument_name, body.depth)
@app.post("/tools/get_positions", tags=["reads"])
async def t_get_positions(
body: GetPositionsReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_positions(body.currency)
@app.post("/tools/get_account_summary", tags=["reads"])
async def t_get_account_summary(
body: GetAccountSummaryReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_account_summary(body.currency)
@app.post("/tools/get_trade_history", tags=["reads"])
async def t_get_trade_history(
body: GetTradeHistoryReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_trade_history(body.limit, body.instrument_name)
@app.post("/tools/get_historical", tags=["reads"])
async def t_get_historical(
body: GetHistoricalReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_historical(
body.instrument, body.start_date, body.end_date, body.resolution
)
@app.post("/tools/get_dvol", tags=["reads"])
async def t_get_dvol(
body: GetDvolReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_dvol(
body.currency, body.start_date, body.end_date, body.resolution
)
@app.post("/tools/get_gex", tags=["reads"])
async def t_get_gex(
body: GetGexReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_gex(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_dealer_gamma_profile", tags=["reads"])
async def t_get_dealer_gamma_profile(
body: OptionFlowReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_dealer_gamma_profile(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_vanna_charm", tags=["reads"])
async def t_get_vanna_charm(
body: OptionFlowReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_vanna_charm(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_oi_weighted_skew", tags=["reads"])
async def t_get_oi_weighted_skew(
body: OptionFlowReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_oi_weighted_skew(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_smile_asymmetry", tags=["reads"])
async def t_get_smile_asymmetry(
body: OptionFlowReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_smile_asymmetry(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_atm_vs_wings_vol", tags=["reads"])
async def t_get_atm_vs_wings_vol(
body: OptionFlowReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_atm_vs_wings_vol(
body.currency, body.expiry_from, body.expiry_to, body.top_n_strikes
)
@app.post("/tools/get_pc_ratio", tags=["reads"])
async def t_get_pc_ratio(
body: GetPcRatioReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_pc_ratio(body.currency)
@app.post("/tools/get_skew_25d", tags=["reads"])
async def t_get_skew_25d(
body: GetSkew25dReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_skew_25d(body.currency, body.expiry)
@app.post("/tools/get_term_structure", tags=["reads"])
async def t_get_term_structure(
body: GetTermStructureReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_term_structure(body.currency)
@app.post("/tools/run_backtest", tags=["writes"])
async def t_run_backtest(
body: RunBacktestReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.run_backtest(
strategy_name=body.strategy_name,
underlying=body.underlying,
lookback_days=body.lookback_days,
resolution=body.resolution,
entry_rules=body.entry_rules,
exit_rules=body.exit_rules,
)
@app.post("/tools/calculate_spread_payoff", tags=["writes"])
async def t_calculate_spread_payoff(
body: CalculateSpreadPayoffReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.calculate_spread_payoff(body.legs, body.quote_currency)
@app.post("/tools/find_by_delta", tags=["writes"])
async def t_find_by_delta(
body: FindByDeltaReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.find_by_delta(
currency=body.currency,
expiry=body.expiry,
target_delta=body.target_delta,
option_type=body.option_type,
max_results=body.max_results,
min_open_interest=body.min_open_interest,
min_volume_24h=body.min_volume_24h,
)
@app.post("/tools/get_iv_rank", tags=["reads"])
async def t_get_iv_rank(
body: GetIvRankReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_iv_rank(body.instrument)
@app.post("/tools/get_dvol_history", tags=["reads"])
async def t_get_dvol_history(
body: GetDvolHistoryReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_dvol_history(body.currency, body.lookback_days)
@app.post("/tools/get_realized_vol", tags=["reads"])
async def t_get_realized_vol(
body: GetRealizedVolReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_realized_vol(body.currency, body.windows)
@app.post("/tools/get_technical_indicators", tags=["reads"])
async def t_get_indicators(
body: GetIndicatorsReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True, observer=True)
return await client.get_technical_indicators(
body.instrument,
body.indicators,
body.start_date,
body.end_date,
body.resolution,
)
# --- Write tools: core only ---
@app.post("/tools/place_order", tags=["writes"])
async def t_place_order(
body: PlaceOrderReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
lev = _enforce_leverage(body.leverage, creds=creds, exchange="deribit")
if lev != cap_default:
with contextlib.suppress(Exception):
await client.set_leverage(body.instrument_name, lev)
result = await client.place_order(
instrument_name=body.instrument_name,
side=body.side,
amount=body.amount,
type=body.type,
price=body.price,
reduce_only=body.reduce_only,
post_only=body.post_only,
label=body.label,
)
audit_write_op(
principal=principal, action="place_order", exchange="deribit",
target=body.instrument_name,
payload={"side": body.side, "amount": body.amount, "type": body.type,
"price": body.price, "leverage": lev, "label": body.label},
result=result,
)
return result
@app.post("/tools/place_combo_order", tags=["writes"])
async def t_place_combo_order(
body: PlaceComboOrderReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
lev = _enforce_leverage(body.leverage, creds=creds, exchange="deribit")
if lev != cap_default:
for leg in body.legs:
with contextlib.suppress(Exception):
await client.set_leverage(leg.instrument_name, lev)
result = await client.place_combo_order(
legs=[leg.model_dump() for leg in body.legs],
side=body.side,
amount=body.amount,
type=body.type,
price=body.price,
label=body.label,
)
audit_write_op(
principal=principal, action="place_combo_order", exchange="deribit",
target=result.get("combo_instrument") if isinstance(result, dict) else None,
payload={"legs": [leg.model_dump() for leg in body.legs],
"side": body.side, "amount": body.amount, "leverage": lev},
result=result if isinstance(result, dict) else None,
)
return result
@app.post("/tools/cancel_order", tags=["writes"])
async def t_cancel_order(
body: CancelOrderReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
result = await client.cancel_order(body.order_id)
audit_write_op(
principal=principal, action="cancel_order", exchange="deribit",
target=body.order_id, payload={}, result=result,
)
return result
@app.post("/tools/set_stop_loss", tags=["writes"])
async def t_set_sl(
body: SetStopLossReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
result = await client.set_stop_loss(body.order_id, body.stop_price)
audit_write_op(
principal=principal, action="set_stop_loss", exchange="deribit",
target=body.order_id, payload={"stop_price": body.stop_price}, result=result,
)
return result
@app.post("/tools/set_take_profit", tags=["writes"])
async def t_set_tp(
body: SetTakeProfitReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
result = await client.set_take_profit(body.order_id, body.tp_price)
audit_write_op(
principal=principal, action="set_take_profit", exchange="deribit",
target=body.order_id, payload={"tp_price": body.tp_price}, result=result,
)
return result
@app.post("/tools/close_position", tags=["writes"])
async def t_close_position(
body: ClosePositionReq, principal: Principal = Depends(require_principal)
):
_check(principal, core=True)
result = await client.close_position(body.instrument_name)
audit_write_op(
principal=principal, action="close_position", exchange="deribit",
target=body.instrument_name, payload={}, result=result,
)
return result
# ───── MCP endpoint (/mcp) — bridge to /tools/* ─────
# The MCP adapter re-exposes the HTTP tool routes above over the Model
# Context Protocol, calling back into this same process via localhost.
port = int(os.environ.get("PORT", "9011"))
mount_mcp_endpoint(
    app,
    name="cerbero-deribit",
    version="0.1.0",
    token_store=token_store,
    internal_base_url=f"http://localhost:{port}",
    # Tool catalog advertised to MCP clients; names must match the
    # /tools/<name> routes registered above.
    tools=[
        {"name": "is_testnet", "description": "True se client Deribit è in modalità testnet."},
        {"name": "environment_info", "description": "Ambiente operativo (testnet/mainnet), source, base_url, max_leverage cap."},
        {"name": "get_ticker", "description": "Ticker di un instrument Deribit."},
        {"name": "get_ticker_batch", "description": "Ticker per N instruments in parallelo (max 20)."},
        {"name": "get_instruments", "description": "Lista instruments per currency."},
        {"name": "get_orderbook", "description": "Orderbook L1/L2 per instrument."},
        {"name": "get_orderbook_imbalance", "description": "Microstructure: imbalance ratio + microprice + slope."},
        {"name": "get_positions", "description": "Posizioni aperte."},
        {"name": "get_account_summary", "description": "Summary account (equity, balance)."},
        {"name": "get_trade_history", "description": "Storia trade recenti."},
        {"name": "get_historical", "description": "OHLCV storico."},
        {"name": "get_dvol", "description": "Deribit Volatility Index (DVOL) OHLC per currency (BTC/ETH)."},
        {"name": "get_dvol_history", "description": "DVOL time series + percentili su lookback_days."},
        {"name": "get_iv_rank", "description": "IV rank 30/90/365d di un instrument vs DVOL storico della currency."},
        {"name": "find_by_delta", "description": "Trova strike con delta più vicino a target, filtrato per liquidità (OI/vol)."},
        {"name": "calculate_spread_payoff", "description": "Payoff/greci/max P-L/break-even/fee per struttura multi-leg."},
        {"name": "run_backtest", "description": "Heuristic backtest RSI-based su storia OHLCV per threshold accept/marginal/reject."},
        {"name": "get_term_structure", "description": "IV ATM per ogni expiry disponibile, detect contango/backwardation."},
        {"name": "get_skew_25d", "description": "Skew 25-delta put/call IV + risk reversal + butterfly per expiry."},
        {"name": "get_pc_ratio", "description": "Put/Call ratio aggregato su OI e volume 24h."},
        {"name": "get_gex", "description": "Gamma exposure per strike + zero gamma level (top N strikes per OI)."},
        {"name": "get_dealer_gamma_profile", "description": "Net dealer gamma per strike (short calls/long puts) + gamma flip level."},
        {"name": "get_vanna_charm", "description": "Vanna (∂delta/∂IV) e Charm (∂delta/∂t) aggregati pesati OI."},
        {"name": "get_oi_weighted_skew", "description": "Skew aggregato pesato per OI: IV puts - IV calls. Positivo = paura."},
        {"name": "get_smile_asymmetry", "description": "Asymmetry IV otm-puts vs otm-calls + ATM IV reference."},
        {"name": "get_atm_vs_wings_vol", "description": "IV ATM vs IV ali 25-delta. wing_richness > 0 = smile/kurtosis."},
        {"name": "get_technical_indicators", "description": "Indicatori tecnici (RSI, MACD, ATR, ADX)."},
        {"name": "get_realized_vol", "description": "Volatilità realizzata annualizzata (log-return std) BTC/ETH + spread IVRV."},
        {"name": "place_order", "description": "Invia ordine (CORE only, testnet)."},
        {"name": "place_combo_order", "description": "Crea combo via private/create_combo + piazza ordine sul combo (1 cross spread invece di N)."},
        {"name": "cancel_order", "description": "Cancella ordine."},
        {"name": "set_stop_loss", "description": "Setta stop loss su posizione."},
        {"name": "set_take_profit", "description": "Setta take profit su posizione."},
        {"name": "close_position", "description": "Chiude posizione aperta."},
    ],
)
return app
-297
View File
@@ -1,297 +0,0 @@
from __future__ import annotations
import re
import pytest
from mcp_deribit.client import DeribitClient
from pytest_httpx import HTTPXMock
@pytest.fixture
def client():
    # Fresh testnet client per test; credentials are dummies (HTTP is mocked).
    return DeribitClient(client_id="cid", client_secret="csec", testnet=True)


# Canned public/auth response used by every private-endpoint test.
AUTH_RESP = {"result": {"access_token": "tok", "expires_in": 3600}}


@pytest.mark.asyncio
async def test_get_ticker(httpx_mock: HTTPXMock, client: DeribitClient):
    # public endpoint — no auth needed
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/ticker"),
        json={
            "result": {
                "mark_price": 50000,
                "last_price": 49900,
                "best_bid_price": 49950,
                "best_ask_price": 50050,
                "instrument_name": "BTC-PERPETUAL",
                "stats": {"volume": 1234.5},
                "open_interest": 9999,
                "greeks": None,
                "mark_iv": None,
            }
        },
    )
    result = await client.get_ticker("BTC-PERPETUAL")
    assert result["mark_price"] == 50000
    assert result["bid"] == 49950
    assert result["ask"] == 50050
    # CER-003: perpetual returns conceptual greeks, not None
    assert result["greeks"] == {"delta": 1.0, "gamma": 0.0, "vega": 0.0, "theta": 0.0, "rho": 0.0}
    # CER-007: testnet flag present
    assert result["testnet"] is True


@pytest.mark.asyncio
async def test_get_ticker_option_preserves_greeks(httpx_mock: HTTPXMock, client: DeribitClient):
    """Options must pass through the API's real greeks/mark_iv untouched."""
    real_greeks = {"delta": 0.42, "gamma": 0.001, "vega": 0.05, "theta": -0.02, "rho": 0.003}
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/ticker"),
        json={
            "result": {
                "mark_price": 2500,
                "last_price": 2500,
                "best_bid_price": 2490,
                "best_ask_price": 2510,
                "instrument_name": "BTC-30APR26-75000-C",
                "stats": {"volume": 5.0},
                "open_interest": 100,
                "greeks": real_greeks,
                "mark_iv": 62.5,
            }
        },
    )
    result = await client.get_ticker("BTC-30APR26-75000-C")
    assert result["greeks"] == real_greeks
    assert result["mark_iv"] == 62.5


def test_is_testnet(client: DeribitClient):
    # Sync helper reporting the environment flag and resolved base URL.
    info = client.is_testnet()
    assert info["testnet"] is True
    assert "test.deribit.com" in info["base_url"]


@pytest.mark.asyncio
async def test_get_instruments_pagination_and_filter(httpx_mock: HTTPXMock, client: DeribitClient):
    """Strike filter plus limit/offset pagination over the instrument list."""
    items = []
    for i, exp_ms in enumerate([1700000000000, 1776000000000, 1800000000000]):
        items.append({
            "instrument_name": f"BTC-inst-{i}",
            "strike": 50000 + i * 10000,
            "expiration_timestamp": exp_ms,
            "option_type": "call",
            "tick_size": 0.5,
            "min_trade_amount": 0.1,
            # CER-008: public/get_instruments does not include OI in production
        })
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_instruments"),
        json={"result": items},
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_book_summary_by_currency"),
        json={"result": [
            {"instrument_name": "BTC-inst-0", "open_interest": 100.0},
            {"instrument_name": "BTC-inst-1", "open_interest": 200.0},
            {"instrument_name": "BTC-inst-2", "open_interest": 300.0},
        ]},
    )
    result = await client.get_instruments(
        "BTC", kind="option", strike_min=55000, limit=1, offset=0
    )
    assert result["total"] == 2
    assert len(result["instruments"]) == 1
    assert result["has_more"] is True
    assert result["testnet"] is True
    assert result["instruments"][0]["strike"] >= 55000
    # CER-008: OI merged in from book_summary
    assert result["instruments"][0]["open_interest"] in (200.0, 300.0)


@pytest.mark.asyncio
async def test_get_instruments_min_oi_filter(httpx_mock: HTTPXMock, client: DeribitClient):
    """CER-008: min_open_interest filters server-side using book_summary."""
    items = [
        {"instrument_name": "BTC-low-OI", "strike": 60000, "expiration_timestamp": 1800000000000,
         "option_type": "call", "tick_size": 0.5, "min_trade_amount": 0.1},
        {"instrument_name": "BTC-high-OI", "strike": 60000, "expiration_timestamp": 1800000000000,
         "option_type": "call", "tick_size": 0.5, "min_trade_amount": 0.1},
    ]
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_instruments"),
        json={"result": items},
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_book_summary_by_currency"),
        json={"result": [
            {"instrument_name": "BTC-low-OI", "open_interest": 5.0},
            {"instrument_name": "BTC-high-OI", "open_interest": 500.0},
        ]},
    )
    result = await client.get_instruments("BTC", kind="option", min_open_interest=100)
    assert result["total"] == 1
    assert result["instruments"][0]["name"] == "BTC-high-OI"
    assert result["instruments"][0]["open_interest"] == 500.0


@pytest.mark.asyncio
async def test_get_account_summary(httpx_mock: HTTPXMock, client: DeribitClient):
    # Private endpoint: first mocked call authenticates, second is the summary.
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/get_account_summary"),
        json={"result": {"equity": 1000.0, "balance": 900.0, "currency": "USDC",
                         "margin_balance": 800.0, "available_funds": 700.0,
                         "unrealized_pnl": 50.0, "total_pnl": 100.0}},
    )
    result = await client.get_account_summary("USDC")
    assert result["equity"] == 1000.0
    assert result["balance"] == 900.0


@pytest.mark.asyncio
async def test_place_order(httpx_mock: HTTPXMock, client: DeribitClient):
    # Buy order round-trips the order payload returned by private/buy.
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/buy"),
        json={"result": {"order": {"order_id": "abc", "amount": 10, "order_state": "open"}, "trades": []}},
    )
    result = await client.place_order(
        instrument_name="BTC-PERPETUAL",
        side="buy",
        amount=10,
        type="limit",
        price=50000,
    )
    assert result["order"]["order_id"] == "abc"


@pytest.mark.asyncio
async def test_get_positions(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/get_positions"),
        json={"result": [
            {
                "instrument_name": "BTC-PERPETUAL",
                "size": 100.0,
                "average_price": 48000.0,
                "mark_price": 50000.0,
                "floating_profit_loss": 200.0,
                "realized_profit_loss": 50.0,
                "leverage": 10,
            }
        ]},
    )
    result = await client.get_positions("USDC")
    assert len(result) == 1
    assert result[0]["instrument"] == "BTC-PERPETUAL"
    # Positive size is normalized to direction "long".
    assert result[0]["direction"] == "long"


@pytest.mark.asyncio
async def test_get_dvol(httpx_mock: HTTPXMock, client: DeribitClient):
    # Raw candles arrive as [ts, open, high, low, close]; currency upper-cased.
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/get_volatility_index_data"),
        json={
            "result": {
                "data": [
                    [1700000000000, 55.0, 58.0, 54.0, 57.0],
                    [1700086400000, 57.0, 60.0, 56.0, 59.5],
                ],
                "continuation": None,
            }
        },
    )
    result = await client.get_dvol("btc", "2024-01-01", "2024-01-02", "1D")
    assert result["currency"] == "BTC"
    assert result["latest"] == 59.5
    assert len(result["candles"]) == 2
    assert result["candles"][0]["close"] == 57.0


@pytest.mark.asyncio
async def test_place_combo_order(httpx_mock: HTTPXMock, client: DeribitClient):
    """Happy path: private/create_combo, then a buy on the combo instrument."""
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/create_combo"),
        json={
            "result": {
                "id": "BTC-COMBO-1",
                "instrument_name": "BTC-COMBO-1",
                "state": "active",
            }
        },
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/buy"),
        json={
            "result": {
                "order": {"order_id": "ord-1", "order_state": "open"},
                "trades": [],
            }
        },
    )
    legs = [
        {"instrument_name": "BTC-30APR26-75000-C", "direction": "buy", "ratio": 1},
        {"instrument_name": "BTC-30APR26-80000-C", "direction": "sell", "ratio": 1},
    ]
    result = await client.place_combo_order(
        legs=legs,
        side="buy",
        amount=1,
        type="limit",
        price=0.05,
        label="vert-spread",
    )
    assert result["combo_instrument"] == "BTC-COMBO-1"
    assert result["order"]["order_id"] == "ord-1"


@pytest.mark.asyncio
async def test_place_combo_order_create_combo_error(httpx_mock: HTTPXMock, client: DeribitClient):
    # An API-level create_combo error is surfaced as state="error", not raised.
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/create_combo"),
        json={"error": {"code": -32602, "message": "Invalid leg"}},
    )
    result = await client.place_combo_order(
        legs=[{"instrument_name": "BTC-X", "direction": "buy", "ratio": 1}],
        side="buy",
        amount=1,
        type="market",
    )
    assert result["state"] == "error"
    assert "Invalid leg" in str(result.get("error"))


@pytest.mark.asyncio
async def test_cancel_order(httpx_mock: HTTPXMock, client: DeribitClient):
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/public/auth"),
        json=AUTH_RESP,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://test\.deribit\.com/api/v2/private/cancel"),
        json={"result": {"order_id": "abc123", "order_state": "cancelled"}},
    )
    result = await client.cancel_order("abc123")
    assert result["order_id"] == "abc123"
    assert result["state"] == "cancelled"
@@ -1,71 +0,0 @@
"""CER-P5-010 env validation tests."""
from __future__ import annotations
import pytest
from mcp_deribit.env_validation import (
MissingEnvError,
fail_fast_if_missing,
optional_env,
require_env,
summarize,
)
def test_require_env_present(monkeypatch):
    # A set variable is returned verbatim.
    monkeypatch.setenv("FOO_KEY", "value1")
    assert require_env("FOO_KEY") == "value1"


def test_require_env_missing_raises(monkeypatch):
    monkeypatch.delenv("MISSING_REQ", raising=False)
    with pytest.raises(MissingEnvError):
        require_env("MISSING_REQ", "critical path")


def test_require_env_empty_raises(monkeypatch):
    # An empty string counts as missing.
    monkeypatch.setenv("EMPTY_REQ", "")
    with pytest.raises(MissingEnvError):
        require_env("EMPTY_REQ")


def test_require_env_whitespace_only_raises(monkeypatch):
    # Whitespace-only values count as missing too.
    monkeypatch.setenv("WS_REQ", " ")
    with pytest.raises(MissingEnvError):
        require_env("WS_REQ")


def test_optional_env_default(monkeypatch):
    monkeypatch.delenv("OPT_A", raising=False)
    assert optional_env("OPT_A", default="fallback") == "fallback"


def test_optional_env_set(monkeypatch):
    monkeypatch.setenv("OPT_B", "xx")
    assert optional_env("OPT_B", default="fallback") == "xx"


def test_fail_fast_all_present(monkeypatch):
    monkeypatch.setenv("AA", "1")
    monkeypatch.setenv("BB", "2")
    fail_fast_if_missing(["AA", "BB"])  # no exit


def test_fail_fast_missing_exits(monkeypatch):
    # Any missing required var terminates the process with exit code 2.
    monkeypatch.setenv("HAVE_IT", "1")
    monkeypatch.delenv("MISSING_X", raising=False)
    with pytest.raises(SystemExit) as exc:
        fail_fast_if_missing(["HAVE_IT", "MISSING_X"])
    assert exc.value.code == 2


def test_summarize_does_not_leak_secrets(monkeypatch, caplog):
    # The startup summary must never log secret values, only redacted
    # presence info; plain values (PORT) and "<unset>" markers are allowed.
    import logging
    monkeypatch.setenv("API_KEY_FOO", "super-secret-token-123456")
    monkeypatch.setenv("PORT", "9000")
    with caplog.at_level(logging.INFO, logger="mcp_deribit.env_validation"):
        summarize(["API_KEY_FOO", "PORT", "NOT_SET_XYZ"])
    log_text = "\n".join(caplog.messages)
    assert "super-secret-token-123456" not in log_text
    assert "9000" in log_text
    assert "<unset>" in log_text
@@ -1,77 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_common.environment import EnvironmentInfo
from mcp_deribit.server import create_app
def _make_app(env_info, creds):
    # Build the app around a fully mocked async client and two bearer tokens:
    # "ct" -> core role, "ot" -> observer role.
    c = AsyncMock()
    c.set_leverage = AsyncMock(return_value={"state": "ok"})
    store = TokenStore(tokens={
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    })
    return create_app(client=c, token_store=store, creds=creds, env_info=env_info)


def test_environment_info_full_shape():
    # Observer token can read the full environment payload.
    env = EnvironmentInfo(
        exchange="deribit",
        environment="testnet",
        source="env",
        env_value="true",
        base_url="https://test.deribit.com/api/v2",
    )
    app = _make_app(env, creds={"max_leverage": 3})
    c = TestClient(app)
    r = c.post(
        "/tools/environment_info",
        headers={"Authorization": "Bearer ot"},
    )
    assert r.status_code == 200
    body = r.json()
    assert body["exchange"] == "deribit"
    assert body["environment"] == "testnet"
    assert body["source"] == "env"
    assert body["env_value"] == "true"
    assert body["base_url"] == "https://test.deribit.com/api/v2"
    assert body["max_leverage"] == 3


def test_environment_info_default_source():
    # When no env var was set, source is "default" and env_value is None.
    env = EnvironmentInfo(
        exchange="deribit",
        environment="testnet",
        source="default",
        env_value=None,
        base_url="https://test.deribit.com/api/v2",
    )
    app = _make_app(env, creds={"max_leverage": 1})
    c = TestClient(app)
    r = c.post(
        "/tools/environment_info",
        headers={"Authorization": "Bearer ct"},
    )
    assert r.status_code == 200
    body = r.json()
    assert body["source"] == "default"
    assert body["env_value"] is None
    assert body["max_leverage"] == 1


def test_environment_info_requires_auth():
    # Missing bearer token -> 401.
    env = EnvironmentInfo(
        exchange="deribit",
        environment="testnet",
        source="default",
        env_value=None,
        base_url="https://test.deribit.com/api/v2",
    )
    app = _make_app(env, creds={"max_leverage": 3})
    c = TestClient(app)
    r = c.post("/tools/environment_info")
    assert r.status_code == 401
@@ -1,50 +0,0 @@
from __future__ import annotations
import pytest
from fastapi import HTTPException
from mcp_deribit.leverage_cap import enforce_leverage, get_max_leverage
def test_get_max_leverage_returns_creds_value():
    creds = {"max_leverage": 5}
    assert get_max_leverage(creds) == 5


def test_get_max_leverage_default_when_missing():
    """Default 1 (cash) when the secret has no max_leverage."""
    assert get_max_leverage({}) == 1


def test_enforce_leverage_pass_under_cap():
    creds = {"max_leverage": 3}
    enforce_leverage(2, creds=creds, exchange="deribit")  # no raise


def test_enforce_leverage_pass_at_cap():
    # Boundary: exactly at the cap is allowed.
    creds = {"max_leverage": 3}
    enforce_leverage(3, creds=creds, exchange="deribit")  # no raise


def test_enforce_leverage_reject_over_cap():
    # Over the cap -> 403 with a structured detail payload.
    creds = {"max_leverage": 3}
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(10, creds=creds, exchange="deribit")
    assert exc.value.status_code == 403
    assert exc.value.detail["error"] == "LEVERAGE_CAP_EXCEEDED"
    assert exc.value.detail["exchange"] == "deribit"
    assert exc.value.detail["requested"] == 10
    assert exc.value.detail["max"] == 3


def test_enforce_leverage_reject_when_below_one():
    # Leverage below 1 is rejected as well.
    creds = {"max_leverage": 3}
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(0, creds=creds, exchange="deribit")
    assert exc.value.status_code == 403


def test_enforce_leverage_default_when_none():
    """If requested is None, the cap itself is applied as the default."""
    creds = {"max_leverage": 3}
    result = enforce_leverage(None, creds=creds, exchange="deribit")
    assert result == 3
@@ -1,269 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_deribit.server import create_app
@pytest.fixture
def mock_client():
    # Exchange-client double: every tool the server exposes gets an AsyncMock
    # returning a minimal, shape-correct payload.
    c = MagicMock()
    c.get_ticker = AsyncMock(return_value={"mark_price": 50000})
    c.get_instruments = AsyncMock(return_value=[])
    c.get_orderbook = AsyncMock(return_value={"bids": [], "asks": []})
    c.get_positions = AsyncMock(return_value=[])
    c.get_account_summary = AsyncMock(return_value={"equity": 1000})
    c.get_trade_history = AsyncMock(return_value=[])
    c.get_historical = AsyncMock(return_value={"candles": []})
    c.get_technical_indicators = AsyncMock(return_value={"rsi": 55.0})
    c.place_order = AsyncMock(return_value={"order_id": "x"})
    c.place_combo_order = AsyncMock(return_value={"combo_instrument": "BTC-COMBO-1", "order": {"order_id": "x"}})
    c.get_dealer_gamma_profile = AsyncMock(return_value={"by_strike": [], "total_net_dealer_gamma": 0})
    c.get_vanna_charm = AsyncMock(return_value={"total_vanna": 0, "total_charm": 0, "legs_analyzed": 0})
    c.get_oi_weighted_skew = AsyncMock(return_value={"skew": 0, "call_iv_weighted": None, "put_iv_weighted": None})
    c.get_smile_asymmetry = AsyncMock(return_value={"atm_iv": 0.5, "asymmetry": 0.0})
    c.get_atm_vs_wings_vol = AsyncMock(return_value={"atm_iv": 0.5, "wing_richness": 0.0})
    c.get_orderbook_imbalance = AsyncMock(return_value={"imbalance_ratio": 0.0, "microprice": 50000})
    c.cancel_order = AsyncMock(return_value={"order_id": "x", "state": "cancelled"})
    c.set_stop_loss = AsyncMock(return_value={"order_id": "x", "stop_price": 45000})
    c.set_take_profit = AsyncMock(return_value={"order_id": "x", "tp_price": 55000})
    c.close_position = AsyncMock(return_value={"closed": True})
    c.set_leverage = AsyncMock(return_value={"state": "ok"})
    return c


@pytest.fixture
def http(mock_client):
    # Test app with two tokens ("ct" = core, "ot" = observer), leverage cap 3.
    store = TokenStore(tokens={
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    })
    app = create_app(client=mock_client, token_store=store, creds={"max_leverage": 3})
    return TestClient(app)


def test_health(http):
    # Health endpoint needs no auth.
    assert http.get("/health").status_code == 200


def test_get_ticker_core_ok(http):
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ct"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200
    assert r.json()["mark_price"] == 50000


def test_get_ticker_observer_ok(http):
    # Reads are allowed for the observer role.
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200


def test_get_ticker_no_auth_401(http):
    r = http.post("/tools/get_ticker", json={"instrument_name": "BTC-PERPETUAL"})
    assert r.status_code == 401


def test_get_ticker_alias_instrument_ok(http, mock_client):
    # "instrument" is accepted as an alias for "instrument_name".
    r = http.post(
        "/tools/get_ticker",
        headers={"Authorization": "Bearer ct"},
        json={"instrument": "ETH"},
    )
    assert r.status_code == 200
    mock_client.get_ticker.assert_awaited_with("ETH")


def test_place_order_core_ok(http):
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={"instrument_name": "BTC-PERPETUAL", "side": "buy", "amount": 10},
    )
    assert r.status_code == 200


def test_place_order_observer_forbidden(http):
    # Writes are core-only: observer gets 403.
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL", "side": "buy", "amount": 10},
    )
    assert r.status_code == 403


def test_get_orderbook_imbalance_observer_ok(http):
    r = http.post(
        "/tools/get_orderbook_imbalance",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200


@pytest.mark.parametrize("path", [
    "/tools/get_dealer_gamma_profile",
    "/tools/get_vanna_charm",
    "/tools/get_oi_weighted_skew",
    "/tools/get_smile_asymmetry",
    "/tools/get_atm_vs_wings_vol",
])
def test_option_flow_indicators_observer_ok(http, path):
    # All option-flow read tools are observer-accessible.
    r = http.post(path, headers={"Authorization": "Bearer ot"}, json={"currency": "BTC"})
    assert r.status_code == 200, (path, r.text)


@pytest.mark.parametrize("path", [
    "/tools/get_dealer_gamma_profile",
    "/tools/get_vanna_charm",
    "/tools/get_oi_weighted_skew",
    "/tools/get_smile_asymmetry",
    "/tools/get_atm_vs_wings_vol",
])
def test_option_flow_indicators_no_auth_401(http, path):
    r = http.post(path, json={"currency": "BTC"})
    assert r.status_code == 401, (path, r.text)


def test_place_combo_order_core_ok(http):
    r = http.post(
        "/tools/place_combo_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "legs": [
                {"instrument_name": "BTC-30APR26-75000-C", "direction": "buy", "ratio": 1},
                {"instrument_name": "BTC-30APR26-80000-C", "direction": "sell", "ratio": 1},
            ],
            "side": "buy",
            "amount": 1,
            "type": "limit",
            "price": 0.05,
        },
    )
    assert r.status_code == 200
    assert r.json()["combo_instrument"] == "BTC-COMBO-1"


def test_place_combo_order_observer_forbidden(http):
    r = http.post(
        "/tools/place_combo_order",
        headers={"Authorization": "Bearer ot"},
        json={
            "legs": [
                {"instrument_name": "BTC-X", "direction": "buy", "ratio": 1},
                {"instrument_name": "BTC-Y", "direction": "sell", "ratio": 1},
            ],
            "side": "buy",
            "amount": 1,
        },
    )
    assert r.status_code == 403


def test_place_combo_order_min_legs(http):
    # A combo needs at least two legs; one leg fails request validation (422).
    r = http.post(
        "/tools/place_combo_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "legs": [{"instrument_name": "BTC-X", "direction": "buy", "ratio": 1}],
            "side": "buy",
            "amount": 1,
        },
    )
    assert r.status_code == 422


def test_place_combo_order_leverage_cap_enforced(http):
    # Requested leverage 50 > cap 3 -> structured 403.
    r = http.post(
        "/tools/place_combo_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "legs": [
                {"instrument_name": "BTC-X", "direction": "buy", "ratio": 1},
                {"instrument_name": "BTC-Y", "direction": "sell", "ratio": 1},
            ],
            "side": "buy",
            "amount": 1,
            "leverage": 50,
        },
    )
    assert r.status_code == 403
    err = r.json()["error"]
    assert err["code"] == "LEVERAGE_CAP_EXCEEDED"


def test_place_order_leverage_cap_enforced(http):
    """Reject leverage > max_leverage (from the secret, default 3 here)."""
    r = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json={
            "instrument_name": "BTC-PERPETUAL",
            "side": "buy",
            "amount": 50,
            "leverage": 50,
        },
    )
    assert r.status_code == 403
    body = r.json()
    err = body["error"]
    assert err["code"] == "LEVERAGE_CAP_EXCEEDED"
    details = err["details"]
    assert details["exchange"] == "deribit"
    assert details["requested"] == 50
    assert details["max"] == 3


def test_close_position_core_ok(http):
    r = http.post(
        "/tools/close_position",
        headers={"Authorization": "Bearer ct"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 200


def test_close_position_observer_forbidden(http):
    r = http.post(
        "/tools/close_position",
        headers={"Authorization": "Bearer ot"},
        json={"instrument_name": "BTC-PERPETUAL"},
    )
    assert r.status_code == 403


def test_cancel_order_observer_forbidden(http):
    r = http.post(
        "/tools/cancel_order",
        headers={"Authorization": "Bearer ot"},
        json={"order_id": "abc123"},
    )
    assert r.status_code == 403


def test_set_stop_loss_observer_forbidden(http):
    r = http.post(
        "/tools/set_stop_loss",
        headers={"Authorization": "Bearer ot"},
        json={"order_id": "abc123", "stop_price": 45000.0},
    )
    assert r.status_code == 403


def test_get_account_summary_observer_ok(http):
    # Account summary is a read: observer allowed.
    r = http.post(
        "/tools/get_account_summary",
        headers={"Authorization": "Bearer ot"},
        json={"currency": "USDC"},
    )
    assert r.status_code == 200
    assert r.json()["equity"] == 1000
-29
View File
@@ -1,29 +0,0 @@
# Packaging metadata for the mcp-hyperliquid service (uv workspace member).
[project]
name = "mcp-hyperliquid"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
    "mcp-common",
    "fastapi>=0.115",
    "uvicorn[standard]>=0.30",
    "httpx>=0.27",
    "pydantic>=2.6",
    "hyperliquid-python-sdk>=0.3",
    "eth-account>=0.11",
]

[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]

[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[tool.hatch.build.targets.wheel]
packages = ["src/mcp_hyperliquid"]

# Resolve mcp-common from the local workspace instead of an index.
[tool.uv.sources]
mcp-common = { workspace = true }

# Console entry point started by the service container.
[project.scripts]
mcp-hyperliquid = "mcp_hyperliquid.__main__:main"
@@ -1,31 +0,0 @@
from __future__ import annotations
from mcp_common.app_factory import ExchangeAppSpec, run_exchange_main
from mcp_hyperliquid.client import HyperliquidClient
from mcp_hyperliquid.server import create_app
# Declarative wiring for the shared exchange-app bootstrap: where credentials
# are read from, which env var toggles testnet, default URLs/port, and the
# factories for the client and the FastAPI app.
SPEC = ExchangeAppSpec(
    exchange="hyperliquid",
    creds_env_var="HYPERLIQUID_WALLET_FILE",
    env_var="HYPERLIQUID_TESTNET",
    flag_key="testnet",
    default_base_url_live="https://api.hyperliquid.xyz",
    default_base_url_testnet="https://api.hyperliquid-testnet.xyz",
    default_port=9012,
    build_client=lambda creds, env_info: HyperliquidClient(
        wallet_address=creds["wallet_address"],
        private_key=creds["private_key"],
        testnet=(env_info.environment == "testnet"),
        # Optional dedicated API wallet; client falls back to the main wallet.
        api_wallet_address=creds.get("api_wallet_address"),
    ),
    build_app=create_app,
)


def main():
    # Entry point referenced by [project.scripts] mcp-hyperliquid.
    run_exchange_main(SPEC)


if __name__ == "__main__":
    main()
@@ -1,577 +0,0 @@
"""Hyperliquid REST API client for perpetual futures trading."""
from __future__ import annotations
import asyncio
import datetime as _dt
from typing import Any
from mcp_common import indicators as ind
from mcp_common.http import async_client
BASE_LIVE = "https://api.hyperliquid.xyz"
BASE_TESTNET = "https://api.hyperliquid-testnet.xyz"

# Identity map of supported candle resolutions; serves as an allow-list and a
# single place to translate if our codes ever diverge from the API's.
RESOLUTION_MAP = {
    "1m": "1m",
    "5m": "5m",
    "15m": "15m",
    "1h": "1h",
    "4h": "4h",
    "1d": "1d",
}

# The SDK (EIP-712 signing) is only needed for write operations; a read-only
# deployment can run without it — _get_exchange raises if it is missing.
try:
    from eth_account import Account
    from hyperliquid.exchange import Exchange
    from hyperliquid.utils import constants as hl_constants

    _SDK_AVAILABLE = True
except ImportError:  # pragma: no cover
    _SDK_AVAILABLE = False
def _to_ms(date_str: str) -> int:
try:
dt = _dt.datetime.fromisoformat(date_str)
except ValueError:
dt = _dt.datetime.strptime(date_str, "%Y-%m-%d")
return int(dt.timestamp() * 1000)
class HyperliquidClient:
"""Async client for the Hyperliquid API.
Read operations use direct HTTP calls via httpx against /info.
Write operations delegate to hyperliquid-python-sdk for EIP-712 signing.
"""
def __init__(
    self,
    wallet_address: str,
    private_key: str,
    testnet: bool = True,
    api_wallet_address: str | None = None,
):
    """Store credentials and select the REST base URL for the chosen network."""
    self.wallet_address = wallet_address
    self.private_key = private_key
    self.testnet = testnet
    # Fall back to the main wallet when no dedicated API wallet is provided.
    if api_wallet_address:
        self.api_wallet_address = api_wallet_address
    else:
        self.api_wallet_address = wallet_address
    if testnet:
        self.base_url = BASE_TESTNET
    else:
        self.base_url = BASE_LIVE
    # SDK Exchange is created lazily on the first write operation.
    self._exchange: Any | None = None
# ── SDK exchange (lazy) ────────────────────────────────────
def _get_exchange(self) -> Any:
    """Return (and cache) an SDK Exchange instance for write ops.

    Raises:
        RuntimeError: if hyperliquid-python-sdk is not installed
            (reads still work; only signed operations need the SDK).
    """
    if not _SDK_AVAILABLE:
        raise RuntimeError(
            "hyperliquid-python-sdk is not installed; write operations unavailable."
        )
    if self._exchange is None:
        # Signing key; account_address below lets an API wallet sign on
        # behalf of the main wallet.
        account = Account.from_key(self.private_key)
        base_url = (
            hl_constants.TESTNET_API_URL if self.testnet else hl_constants.MAINNET_API_URL
        )
        # Empty spot metadata: this client only trades perps.
        # NOTE(review): presumably this also keeps the SDK from fetching
        # spot meta at construction time — confirm against the SDK docs.
        empty_spot_meta: dict[str, Any] = {"universe": [], "tokens": []}
        self._exchange = Exchange(
            account,
            base_url,
            account_address=self.wallet_address,
            spot_meta=empty_spot_meta,
        )
    return self._exchange
# ── Internal helpers ───────────────────────────────────────
async def _post(self, payload: dict[str, Any]) -> Any:
    """POST JSON to the /info endpoint.

    A fresh HTTP client is opened per call (15 s timeout); non-2xx
    responses raise via raise_for_status().
    """
    async with async_client(timeout=15.0) as http:
        resp = await http.post(f"{self.base_url}/info", json=payload)
        resp.raise_for_status()
        return resp.json()
@staticmethod
async def _run_sync(func: Any, *args: Any, **kwargs: Any) -> Any:
    """Run a synchronous SDK call in the default executor.

    Keeps blocking SDK I/O off the event loop thread.
    """
    # Fix: asyncio.get_event_loop() is deprecated inside coroutines
    # (Python 3.10+); get_running_loop() is the correct call here since
    # this is always awaited from within a running loop.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, lambda: func(*args, **kwargs))
# ── Read tools ─────────────────────────────────────────────
async def get_markets(self) -> list[dict[str, Any]]:
    """List all perp markets with metadata and current stats."""
    # metaAndAssetCtxs returns [meta, contexts]; the two lists are
    # index-aligned, and strict zip asserts that invariant.
    data = await self._post({"type": "metaAndAssetCtxs"})
    universe = data[0]["universe"]
    ctx_list = data[1]
    return [
        {
            "asset": meta["name"],
            "mark_price": float(ctx.get("markPx", 0)),
            "funding_rate": float(ctx.get("funding", 0)),
            "open_interest": float(ctx.get("openInterest", 0)),
            "volume_24h": float(ctx.get("dayNtlVlm", 0)),
            "max_leverage": int(meta.get("maxLeverage", 1)),
        }
        for meta, ctx in zip(universe, ctx_list, strict=True)
    ]
async def get_ticker(self, instrument: str) -> dict[str, Any]:
"""Get ticker information for a specific asset."""
markets = await self.get_markets()
for m in markets:
if m["asset"].upper() == instrument.upper():
return {
"asset": m["asset"],
"mark_price": m["mark_price"],
"mid_price": m["mark_price"],
"funding_rate": m["funding_rate"],
"open_interest": m["open_interest"],
"volume_24h": m["volume_24h"],
"premium": 0.0,
}
return {"error": f"Asset {instrument} not found"}
async def get_orderbook(self, instrument: str, depth: int = 10) -> dict[str, Any]:
"""Get L2 order book for an asset."""
data = await self._post({"type": "l2Book", "coin": instrument.upper()})
levels = data.get("levels", [[], []])
bids = [{"price": float(b["px"]), "size": float(b["sz"])} for b in levels[0][:depth]]
asks = [{"price": float(a["px"]), "size": float(a["sz"])} for a in levels[1][:depth]]
return {"asset": instrument, "bids": bids, "asks": asks}
async def get_positions(self) -> list[dict[str, Any]]:
"""Get open positions for the wallet."""
data = await self._post(
{"type": "clearinghouseState", "user": self.wallet_address}
)
positions = []
for ap in data.get("assetPositions", []):
pos = ap.get("position", {})
size = float(pos.get("szi", 0))
if size == 0:
continue
leverage_data = pos.get("leverage", {})
lev_value = (
leverage_data.get("value", "1")
if isinstance(leverage_data, dict)
else str(leverage_data)
)
positions.append(
{
"asset": pos.get("coin", ""),
"size": abs(size),
"direction": "long" if size > 0 else "short",
"entry_price": float(pos.get("entryPx", 0) or 0),
"unrealized_pnl": float(pos.get("unrealizedPnl", 0)),
"leverage": float(lev_value),
"liquidation_price": float(pos.get("liquidationPx", 0) or 0),
}
)
return positions
async def get_account_summary(self) -> dict[str, Any]:
"""Get account summary (equity, balance, margin) including spot balances.
Con Unified Account, spot USDC e perps condividono collaterale.
`spot_fetch_ok` / `perps_fetch_ok` indicano se i due lati sono stati
letti correttamente: se uno dei due è False il chiamante dovrebbe
considerare `equity`/`available_balance` un lower bound.
"""
perps_fetch_ok = True
perps_equity = 0.0
perps_available = 0.0
margin_used = 0.0
unrealized_pnl = 0.0
try:
data = await self._post(
{"type": "clearinghouseState", "user": self.wallet_address}
)
margin = data.get("marginSummary") or {}
perps_equity = float(margin.get("accountValue", 0) or 0)
perps_available = float(margin.get("totalRawUsd", 0) or 0)
margin_used = float(margin.get("totalMarginUsed", 0) or 0)
unrealized_pnl = float(margin.get("totalNtlPos", 0) or 0)
except Exception:
perps_fetch_ok = False
spot_fetch_ok = True
spot_usdc = 0.0
try:
spot_data = await self._post(
{"type": "spotClearinghouseState", "user": self.wallet_address}
)
for b in spot_data.get("balances", []) or []:
if b.get("coin") == "USDC":
spot_usdc = float(b.get("total", 0) or 0)
except Exception:
spot_fetch_ok = False
total_equity = perps_equity + spot_usdc
total_available = perps_available + spot_usdc
return {
"equity": total_equity,
"perps_equity": perps_equity,
"perps_available": perps_available,
"spot_usdc": spot_usdc,
"available_balance": total_available,
"margin_used": margin_used,
"unrealized_pnl": unrealized_pnl,
"perps_fetch_ok": perps_fetch_ok,
"spot_fetch_ok": spot_fetch_ok,
}
async def get_trade_history(self, limit: int = 100) -> list[dict[str, Any]]:
"""Get recent trade fills."""
data = await self._post({"type": "userFills", "user": self.wallet_address})
trades = []
for t in data[:limit]:
trades.append(
{
"asset": t.get("coin", ""),
"side": t.get("side", ""),
"size": float(t.get("sz", 0)),
"price": float(t.get("px", 0)),
"fee": float(t.get("fee", 0)),
"timestamp": t.get("time", ""),
}
)
return trades
async def get_historical(
self, instrument: str, start_date: str, end_date: str, resolution: str = "1h"
) -> dict[str, Any]:
"""Get OHLCV candles for an asset."""
start_ms = _to_ms(start_date)
end_ms = _to_ms(end_date)
interval = RESOLUTION_MAP.get(resolution, resolution)
data = await self._post(
{
"type": "candleSnapshot",
"req": {
"coin": instrument.upper(),
"interval": interval,
"startTime": start_ms,
"endTime": end_ms,
},
}
)
candles = []
for c in data:
candles.append(
{
"timestamp": c.get("t", 0),
"open": float(c.get("o", 0)),
"high": float(c.get("h", 0)),
"low": float(c.get("l", 0)),
"close": float(c.get("c", 0)),
"volume": float(c.get("v", 0)),
}
)
return {"candles": candles}
async def get_open_orders(self) -> list[dict[str, Any]]:
"""Get all open orders for the wallet."""
data = await self._post({"type": "openOrders", "user": self.wallet_address})
orders = []
for o in data:
orders.append(
{
"oid": o.get("oid"),
"asset": o.get("coin", ""),
"side": o.get("side", ""),
"size": float(o.get("sz", 0)),
"price": float(o.get("limitPx", 0)),
"order_type": o.get("orderType", ""),
}
)
return orders
async def basis_spot_perp(self, asset: str) -> dict[str, Any]:
asset = asset.upper()
# Spot reference price from Coinbase (mainnet reference, anche se HL è testnet)
spot_price: float | None = None
spot_source = "coinbase"
try:
async with async_client(timeout=8.0) as c:
resp = await c.get(f"https://api.coinbase.com/v2/prices/{asset}-USD/spot")
if resp.status_code == 200:
spot_price = float(resp.json().get("data", {}).get("amount"))
except Exception:
spot_price = None
if spot_price is None:
try:
async with async_client(timeout=8.0) as c:
resp = await c.get(
"https://api.kraken.com/0/public/Ticker", params={"pair": f"{asset}USD"}
)
if resp.status_code == 200:
res = resp.json().get("result") or {}
first = next(iter(res.values()), {})
price = (first.get("c") or [None])[0]
spot_price = float(price) if price else None
spot_source = "kraken"
except Exception:
pass
# Perp price + funding from HL
try:
ctx = await self._post({"type": "metaAndAssetCtxs"})
universe = ctx[0]["universe"]
ctx_list = ctx[1]
perp_price = None
funding = None
for meta, c in zip(universe, ctx_list, strict=True):
if meta["name"].upper() == asset:
perp_price = float(c.get("markPx", 0))
funding = float(c.get("funding", 0))
break
except Exception:
perp_price = None
funding = None
if spot_price is None or perp_price is None:
return {
"asset": asset,
"spot_price": spot_price,
"perp_price": perp_price,
"error": "missing spot or perp price",
}
basis_abs = perp_price - spot_price
basis_pct = round(basis_abs / spot_price * 100, 4)
basis_ann_funding = (
round(funding * 24 * 365 * 100, 2) if funding is not None else None
)
carry_opp = bool(
basis_ann_funding is not None
and basis_ann_funding > 5
and (funding or 0) > 0.0001
)
return {
"asset": asset,
"spot_price": spot_price,
"spot_source": spot_source,
"perp_price": perp_price,
"basis_absolute": round(basis_abs, 4),
"basis_pct": basis_pct,
"current_funding_hourly": funding,
"basis_annualized_funding_only": basis_ann_funding,
"carry_opportunity": carry_opp,
"testnet": self.testnet,
"data_timestamp": _dt.datetime.now(_dt.UTC).isoformat(),
}
async def get_funding_rate(self, instrument: str) -> dict[str, Any]:
"""Get current and recent historical funding rates for an asset."""
data = await self._post({"type": "metaAndAssetCtxs"})
universe = data[0]["universe"]
ctx_list = data[1]
current_rate = None
for meta, ctx in zip(universe, ctx_list, strict=True):
if meta["name"].upper() == instrument.upper():
current_rate = float(ctx.get("funding", 0))
break
if current_rate is None:
return {"error": f"Asset {instrument} not found"}
# Fetch funding history (last 7 days)
end_ms = int(_dt.datetime.utcnow().timestamp() * 1000)
start_ms = end_ms - 7 * 24 * 3600 * 1000
history_data = await self._post(
{
"type": "fundingHistory",
"coin": instrument.upper(),
"startTime": start_ms,
"endTime": end_ms,
}
)
history = []
for entry in history_data:
history.append(
{
"timestamp": entry.get("time", 0),
"funding_rate": float(entry.get("fundingRate", 0)),
}
)
return {
"asset": instrument,
"current_funding_rate": current_rate,
"history": history,
}
async def get_indicators(
self,
instrument: str,
indicators: list[str],
start_date: str,
end_date: str,
resolution: str = "1h",
) -> dict[str, Any]:
"""Compute technical indicators over OHLCV data."""
historical = await self.get_historical(instrument, start_date, end_date, resolution)
candles = historical.get("candles", [])
closes = [c["close"] for c in candles]
highs = [c["high"] for c in candles]
lows = [c["low"] for c in candles]
result: dict[str, Any] = {}
for indicator in indicators:
name = indicator.lower()
if name == "sma":
result["sma"] = ind.sma(closes, 20)
elif name == "rsi":
result["rsi"] = ind.rsi(closes)
elif name == "atr":
result["atr"] = ind.atr(highs, lows, closes)
elif name == "macd":
result["macd"] = ind.macd(closes)
elif name == "adx":
result["adx"] = ind.adx(highs, lows, closes)
else:
result[name] = None
return result
# ── Write tools (via SDK) ──────────────────────────────────
async def place_order(
self,
instrument: str,
side: str,
amount: float,
type: str = "limit",
price: float | None = None,
reduce_only: bool = False,
) -> dict[str, Any]:
"""Place an order on Hyperliquid using the SDK for EIP-712 signing."""
exchange = self._get_exchange()
is_buy = side.lower() in ("buy", "long")
coin = instrument.upper()
if type == "market":
ot: dict[str, Any] = {"limit": {"tif": "Ioc"}}
if price is None:
ticker = await self.get_ticker(coin)
mark = ticker.get("mark_price", 0)
price = round(mark * 1.03, 1) if is_buy else round(mark * 0.97, 1)
elif type in ("stop_market", "stop_loss"):
ot = {"trigger": {"triggerPx": float(price), "isMarket": True, "tpsl": "sl"}}
elif type == "take_profit":
ot = {"trigger": {"triggerPx": float(price), "isMarket": True, "tpsl": "tp"}}
else:
ot = {"limit": {"tif": "Gtc"}}
if price is None:
return {"error": "price is required for limit orders"}
result = await self._run_sync(
exchange.order, coin, is_buy, amount, price, ot, reduce_only
)
status = result.get("status", "unknown")
response = result.get("response", {})
if isinstance(response, str):
return {
"status": status,
"error": response,
"order_id": "",
"filled_size": 0,
"avg_fill_price": 0,
}
statuses = response.get("data", {}).get("statuses", [{}])
first = statuses[0] if statuses else {}
if isinstance(first, str):
return {
"status": status,
"error": first,
"order_id": "",
"filled_size": 0,
"avg_fill_price": 0,
}
return {
"order_id": first.get("resting", {}).get(
"oid", first.get("filled", {}).get("oid", "")
),
"status": status,
"filled_size": float(first.get("filled", {}).get("totalSz", 0)),
"avg_fill_price": float(first.get("filled", {}).get("avgPx", 0)),
}
async def cancel_order(self, order_id: str, instrument: str) -> dict[str, Any]:
"""Cancel an existing order using the SDK."""
exchange = self._get_exchange()
result = await self._run_sync(
exchange.cancel, instrument.upper(), int(order_id)
)
status = result.get("status", "unknown")
response = result.get("response", "")
if isinstance(response, str) and status == "err":
return {"order_id": order_id, "status": status, "error": response}
return {"order_id": order_id, "status": status}
async def set_stop_loss(
self, instrument: str, stop_price: float, size: float
) -> dict[str, Any]:
"""Set a stop-loss trigger order."""
# Determine direction by checking open position
positions = await self.get_positions()
direction = "sell" # default: assume long
for pos in positions:
if pos["asset"].upper() == instrument.upper():
direction = "sell" if pos["direction"] == "long" else "buy"
if size == 0:
size = pos["size"]
break
return await self.place_order(
instrument=instrument,
side=direction,
amount=size,
type="stop_loss",
price=stop_price,
reduce_only=True,
)
async def set_take_profit(
self, instrument: str, tp_price: float, size: float
) -> dict[str, Any]:
"""Set a take-profit trigger order."""
positions = await self.get_positions()
direction = "sell" # default: assume long
for pos in positions:
if pos["asset"].upper() == instrument.upper():
direction = "sell" if pos["direction"] == "long" else "buy"
if size == 0:
size = pos["size"]
break
return await self.place_order(
instrument=instrument,
side=direction,
amount=size,
type="take_profit",
price=tp_price,
reduce_only=True,
)
async def close_position(self, instrument: str) -> dict[str, Any]:
"""Close an open position for the given asset using market_close."""
exchange = self._get_exchange()
try:
result = await self._run_sync(exchange.market_close, instrument.upper())
return {
"status": result.get("status", "unknown"),
"asset": instrument,
}
except Exception as exc:
return {"error": str(exc), "asset": instrument}
async def health(self) -> dict[str, Any]:
"""Health check — ping /info for server status."""
try:
await self._post({"type": "meta"})
return {"status": "ok", "testnet": self.testnet}
except Exception as exc:
return {"status": "error", "error": str(exc)}
@@ -1,56 +0,0 @@
"""Leverage cap server-side per place_order.
Cap letto dal secret JSON via campo `max_leverage`. Default 1 (cash) se assente.
"""
from __future__ import annotations
from fastapi import HTTPException
def get_max_leverage(creds: dict) -> int:
    """Return the leverage cap stored in the credentials secret.

    Falls back to 1 (cash-only) when `max_leverage` is missing or not
    coercible to an int; values below 1 are clamped to 1.
    """
    try:
        cap = int(creds.get("max_leverage", 1))
    except (TypeError, ValueError):
        # Non-numeric secret value: degrade safely to no leverage.
        cap = 1
    return cap if cap > 1 else 1
def enforce_leverage(
    requested: int | float | None,
    *,
    creds: dict,
    exchange: str,
) -> int:
    """Validate the requested leverage against the secret's cap.

    Returns the leverage to apply — the cap itself when ``requested`` is
    None. Raises HTTPException(403, LEVERAGE_CAP_EXCEEDED) when the request
    is below 1 or above the cap.
    """
    cap = get_max_leverage(creds)
    if requested is None:
        # No explicit request: apply the cap as the default.
        return cap
    lev = int(requested)
    if 1 <= lev <= cap:
        return lev
    detail: dict = {
        "error": "LEVERAGE_CAP_EXCEEDED",
        "exchange": exchange,
        "requested": lev,
        "max": cap,
    }
    if lev < 1:
        detail["reason"] = "leverage must be >= 1"
    raise HTTPException(status_code=403, detail=detail)
@@ -1,408 +0,0 @@
from __future__ import annotations
import os
from fastapi import Depends, FastAPI, HTTPException
from mcp_common.audit import audit_write_op
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.environment import EnvironmentInfo
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel, field_validator, model_validator
from mcp_hyperliquid.client import HyperliquidClient
from mcp_hyperliquid.leverage_cap import enforce_leverage as _enforce_leverage
from mcp_hyperliquid.leverage_cap import get_max_leverage
# --- Body models ---
class GetMarketsReq(BaseModel):
    """Empty request body: list all perp markets."""
    pass
class GetTickerReq(BaseModel):
    """Request body for get_ticker."""
    instrument: str  # perp symbol, e.g. "BTC"
class GetOrderbookReq(BaseModel):
    """Request body for get_orderbook."""
    instrument: str
    depth: int = 10  # max price levels per side
class GetPositionsReq(BaseModel):
    """Empty request body: list open positions."""
    pass
class GetAccountSummaryReq(BaseModel):
    """Empty request body: account summary (equity/balance/margin)."""
    pass
class GetTradeHistoryReq(BaseModel):
    """Request body for get_trade_history."""
    limit: int = 100  # max number of fills returned
class GetHistoricalReq(BaseModel):
    """Request body for get_historical (OHLCV candles).

    Accepts `asset` as an alias for `instrument` and `interval` as an alias
    for `resolution`; missing dates default to a window ending now (UTC)
    sized from `limit` (~6 hourly candles per day).
    """
    instrument: str | None = None
    asset: str | None = None  # alias for instrument
    start_date: str | None = None
    end_date: str | None = None
    resolution: str = "1h"
    interval: str | None = None  # alias for resolution
    limit: int = 50  # used only to size the default date window
    model_config = {"extra": "allow"}  # tolerate extra keys from callers
    @model_validator(mode="after")
    def _normalize(self):
        # Resolve aliases and fill default dates after field validation.
        from datetime import UTC, datetime, timedelta
        sym = self.instrument or self.asset
        if not sym:
            raise ValueError("instrument (or asset) is required")
        self.instrument = sym
        if self.interval:
            self.resolution = self.interval
        if not self.end_date:
            self.end_date = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
        if not self.start_date:
            # Roughly limit/6 days of hourly candles, at least one day.
            days = max(1, self.limit // 6)
            self.start_date = (
                datetime.now(UTC) - timedelta(days=days)
            ).strftime("%Y-%m-%dT%H:%M:%S")
        return self
class GetOpenOrdersReq(BaseModel):
    """Empty request body: list open orders."""
    pass
class GetFundingRateReq(BaseModel):
    """Request body for get_funding_rate."""
    instrument: str
class BasisSpotPerpReq(BaseModel):
    """Request body for basis_spot_perp."""
    asset: str
class GetIndicatorsReq(BaseModel):
    """Request body for get_indicators.

    Same alias/defaulting behavior as GetHistoricalReq; `indicators` also
    accepts a JSON-array string or a comma-separated string.
    """
    instrument: str | None = None
    asset: str | None = None  # alias for instrument
    # Mutable default is safe here: pydantic deep-copies field defaults.
    indicators: list[str] = ["rsi", "atr", "macd", "adx"]
    start_date: str | None = None
    end_date: str | None = None
    resolution: str = "1h"
    interval: str | None = None  # alias for resolution
    limit: int = 50  # used only to size the default date window
    model_config = {"extra": "allow"}
    @model_validator(mode="after")
    def _normalize(self):
        # Resolve aliases and fill default dates after field validation.
        from datetime import UTC, datetime, timedelta
        sym = self.instrument or self.asset
        if not sym:
            raise ValueError("instrument (or asset) is required")
        self.instrument = sym
        if self.interval:
            self.resolution = self.interval
        if not self.end_date:
            self.end_date = datetime.now(UTC).strftime("%Y-%m-%dT%H:%M:%S")
        if not self.start_date:
            # At least two days of data so indicators have enough candles.
            days = max(2, self.limit // 6)
            self.start_date = (
                datetime.now(UTC) - timedelta(days=days)
            ).strftime("%Y-%m-%dT%H:%M:%S")
        return self
    @field_validator("indicators", mode="before")
    @classmethod
    def _coerce_indicators(cls, v):
        # Accept '["rsi","atr"]' (JSON) or 'rsi,atr' (CSV) besides a real list.
        if isinstance(v, str):
            import json
            s = v.strip()
            if s.startswith("["):
                try:
                    parsed = json.loads(s)
                    if isinstance(parsed, list):
                        return [str(x).strip() for x in parsed if str(x).strip()]
                except json.JSONDecodeError:
                    pass  # fall back to CSV parsing below
            return [x.strip() for x in s.split(",") if x.strip()]
        if isinstance(v, list):
            return v
        raise ValueError(
            "indicators must be a list like ['rsi','atr','macd'] "
            "or a comma-separated string like 'rsi,atr,macd'"
        )
class PlaceOrderReq(BaseModel):
    """Request body for place_order (write, CORE capability only)."""
    instrument: str
    side: str  # "buy" | "sell"
    amount: float
    type: str = "limit"  # "limit" | "market" | "stop_loss" | "stop_market" | "take_profit"
    price: float | None = None  # required for limit and trigger orders
    reduce_only: bool = False
    leverage: int | None = None  # CER-016: None → server applies the secret's max_leverage cap
class CancelOrderReq(BaseModel):
    """Request body for cancel_order."""
    order_id: str
    instrument: str
class SetStopLossReq(BaseModel):
    """Request body for set_stop_loss."""
    instrument: str
    stop_price: float
    size: float  # 0 means "entire position" (resolved server-side)
class SetTakeProfitReq(BaseModel):
    """Request body for set_take_profit."""
    instrument: str
    tp_price: float
    size: float  # 0 means "entire position" (resolved server-side)
class ClosePositionReq(BaseModel):
    """Request body for close_position."""
    instrument: str
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Raise HTTP 403 unless *principal* holds at least one allowed capability."""
    allowed = {
        name
        for name, enabled in (("core", core), ("observer", observer))
        if enabled
    }
    if principal.capabilities.isdisjoint(allowed):
        raise HTTPException(403, f"capability required: {allowed}")
# --- App factory ---
def create_app(
    *,
    client: HyperliquidClient,
    token_store: TokenStore,
    creds: dict | None = None,
    env_info: EnvironmentInfo | None = None,
) -> FastAPI:
    """Build the mcp-hyperliquid FastAPI app.

    Read tools allow both `core` and `observer` capabilities; write tools are
    `core` only and are audit-logged. An MCP bridge endpoint (/mcp) is
    mounted last, proxying tool calls back to the local /tools/* routes.
    """
    creds = creds or {}
    app = build_app(name="mcp-hyperliquid", version="0.1.0", token_store=token_store)
    # --- Read tools: core + observer ---
    @app.post("/tools/environment_info", tags=["reads"])
    async def t_environment_info(principal: Principal = Depends(require_principal)):
        _check(principal, core=True, observer=True)
        if env_info is None:
            # No resolved EnvironmentInfo: derive a best-effort view from the client.
            return {
                "exchange": "hyperliquid",
                "environment": "testnet" if getattr(client, "testnet", True) else "mainnet",
                "source": "credentials",
                "env_value": None,
                "base_url": getattr(client, "base_url", None),
                "max_leverage": get_max_leverage(creds),
            }
        return {
            "exchange": env_info.exchange,
            "environment": env_info.environment,
            "source": env_info.source,
            "env_value": env_info.env_value,
            "base_url": env_info.base_url,
            "max_leverage": get_max_leverage(creds),
        }
    @app.post("/tools/get_markets", tags=["reads"])
    async def t_get_markets(
        body: GetMarketsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_markets()
    @app.post("/tools/get_ticker", tags=["reads"])
    async def t_get_ticker(
        body: GetTickerReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_ticker(body.instrument)
    @app.post("/tools/get_orderbook", tags=["reads"])
    async def t_get_orderbook(
        body: GetOrderbookReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_orderbook(body.instrument, body.depth)
    @app.post("/tools/get_positions", tags=["reads"])
    async def t_get_positions(
        body: GetPositionsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_positions()
    @app.post("/tools/get_account_summary", tags=["reads"])
    async def t_get_account_summary(
        body: GetAccountSummaryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_account_summary()
    @app.post("/tools/get_trade_history", tags=["reads"])
    async def t_get_trade_history(
        body: GetTradeHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_trade_history(body.limit)
    @app.post("/tools/get_historical", tags=["reads"])
    async def t_get_historical(
        body: GetHistoricalReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_historical(
            body.instrument, body.start_date, body.end_date, body.resolution
        )
    @app.post("/tools/get_open_orders", tags=["reads"])
    async def t_get_open_orders(
        body: GetOpenOrdersReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_open_orders()
    @app.post("/tools/get_funding_rate", tags=["reads"])
    async def t_get_funding_rate(
        body: GetFundingRateReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_funding_rate(body.instrument)
    # Read-only analytics endpoint: was mis-tagged "writes" (OpenAPI grouping
    # only); the ACL (core+observer) already treated it as a read.
    @app.post("/tools/basis_spot_perp", tags=["reads"])
    async def t_basis_spot_perp(
        body: BasisSpotPerpReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.basis_spot_perp(body.asset)
    @app.post("/tools/get_indicators", tags=["reads"])
    async def t_get_indicators(
        body: GetIndicatorsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await client.get_indicators(
            body.instrument,
            body.indicators,
            body.start_date,
            body.end_date,
            body.resolution,
        )
    # --- Write tools: core only ---
    @app.post("/tools/place_order", tags=["writes"])
    async def t_place_order(
        body: PlaceOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        # Rejects with 403 if body.leverage exceeds the secret's cap.
        # NOTE(review): the validated leverage is not forwarded to the client
        # (HL leverage is set per-asset elsewhere) — confirm this is intended.
        _enforce_leverage(body.leverage, creds=creds, exchange="hyperliquid")
        result = await client.place_order(
            instrument=body.instrument,
            side=body.side,
            amount=body.amount,
            type=body.type,
            price=body.price,
            reduce_only=body.reduce_only,
        )
        audit_write_op(
            principal=principal, action="place_order", exchange="hyperliquid",
            target=body.instrument,
            payload={"side": body.side, "amount": body.amount, "type": body.type,
                     "price": body.price, "reduce_only": body.reduce_only,
                     "leverage": body.leverage},
            result=result,
        )
        return result
    @app.post("/tools/cancel_order", tags=["writes"])
    async def t_cancel_order(
        body: CancelOrderReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        result = await client.cancel_order(body.order_id, body.instrument)
        audit_write_op(
            principal=principal, action="cancel_order", exchange="hyperliquid",
            target=body.order_id, payload={"instrument": body.instrument}, result=result,
        )
        return result
    @app.post("/tools/set_stop_loss", tags=["writes"])
    async def t_set_sl(
        body: SetStopLossReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        result = await client.set_stop_loss(body.instrument, body.stop_price, body.size)
        audit_write_op(
            principal=principal, action="set_stop_loss", exchange="hyperliquid",
            target=body.instrument,
            payload={"stop_price": body.stop_price, "size": body.size},
            result=result,
        )
        return result
    @app.post("/tools/set_take_profit", tags=["writes"])
    async def t_set_tp(
        body: SetTakeProfitReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        result = await client.set_take_profit(body.instrument, body.tp_price, body.size)
        audit_write_op(
            principal=principal, action="set_take_profit", exchange="hyperliquid",
            target=body.instrument,
            payload={"tp_price": body.tp_price, "size": body.size},
            result=result,
        )
        return result
    @app.post("/tools/close_position", tags=["writes"])
    async def t_close_position(
        body: ClosePositionReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True)
        result = await client.close_position(body.instrument)
        audit_write_op(
            principal=principal, action="close_position", exchange="hyperliquid",
            target=body.instrument, payload={}, result=result,
        )
        return result
    # ───── MCP endpoint (/mcp) — bridge to /tools/* ─────
    port = int(os.environ.get("PORT", "9012"))
    mount_mcp_endpoint(
        app,
        name="cerbero-hyperliquid",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "environment_info", "description": "Ambiente operativo (testnet/mainnet), source, base_url, max_leverage cap."},
            {"name": "get_markets", "description": "Lista mercati perp disponibili."},
            {"name": "get_ticker", "description": "Ticker di un perp."},
            {"name": "get_orderbook", "description": "Orderbook L2."},
            {"name": "get_positions", "description": "Posizioni aperte."},
            {"name": "get_account_summary", "description": "Account summary (spot + perp equity)."},
            {"name": "get_trade_history", "description": "Storia trade."},
            {"name": "get_historical", "description": "OHLCV storico."},
            {"name": "get_open_orders", "description": "Ordini aperti."},
            {"name": "get_funding_rate", "description": "Funding rate corrente per simbolo."},
            {"name": "basis_spot_perp", "description": "Basis spot-perp annualizzato + carry opportunity detection."},
            {"name": "get_indicators", "description": "Indicatori tecnici."},
            {"name": "place_order", "description": "Invia ordine (CORE only)."},
            {"name": "cancel_order", "description": "Cancella ordine."},
            {"name": "set_stop_loss", "description": "Stop loss su posizione."},
            {"name": "set_take_profit", "description": "Take profit su posizione."},
            {"name": "close_position", "description": "Chiude posizione."},
        ],
    )
    return app
@@ -1,227 +0,0 @@
from __future__ import annotations
import re
import pytest
from mcp_hyperliquid.client import HyperliquidClient
from pytest_httpx import HTTPXMock
@pytest.fixture
def client():
    """HyperliquidClient wired to testnet with dummy credentials."""
    return HyperliquidClient(
        wallet_address="0xDeadBeef",
        private_key="0x" + "a" * 64,
        testnet=True,
    )
# Shared mock responses
# metaAndAssetCtxs payload: [meta with "universe", parallel list of asset contexts].
META_AND_CTX = [
    {
        "universe": [
            {"name": "BTC", "maxLeverage": 50},
            {"name": "ETH", "maxLeverage": 25},
        ]
    },
    [
        {
            "markPx": "50000.0",
            "funding": "0.0001",
            "openInterest": "1000.0",
            "dayNtlVlm": "500000.0",
        },
        {
            "markPx": "3000.0",
            "funding": "0.00005",
            "openInterest": "500.0",
            "dayNtlVlm": "200000.0",
        },
    ],
]
# clearinghouseState payload: perps margin summary + one open BTC long.
CLEARINGHOUSE_STATE = {
    "marginSummary": {
        "accountValue": "1500.0",
        "totalRawUsd": "1200.0",
        "totalMarginUsed": "300.0",
        "totalNtlPos": "50.0",
    },
    "assetPositions": [
        {
            "position": {
                "coin": "BTC",
                "szi": "0.1",
                "entryPx": "48000.0",
                "unrealizedPnl": "200.0",
                "leverage": {"value": "10"},
                "liquidationPx": "40000.0",
            }
        }
    ],
}
# spotClearinghouseState payload: one spot USDC balance.
SPOT_STATE = {"balances": [{"coin": "USDC", "total": "500.0"}]}
@pytest.mark.asyncio
async def test_get_markets(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Markets list maps universe + asset contexts into flat dicts."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    markets = await client.get_markets()
    assert len(markets) == 2
    assert markets[0]["asset"] == "BTC"
    assert markets[0]["mark_price"] == 50000.0
    assert markets[0]["max_leverage"] == 50
@pytest.mark.asyncio
async def test_get_ticker(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Ticker lookup returns parsed floats for a known asset."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    result = await client.get_ticker("BTC")
    assert result["asset"] == "BTC"
    assert result["mark_price"] == 50000.0
    assert result["funding_rate"] == 0.0001
@pytest.mark.asyncio
async def test_get_ticker_not_found(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Unknown assets produce an error dict instead of raising."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=META_AND_CTX,
    )
    result = await client.get_ticker("SOL")
    assert "error" in result
@pytest.mark.asyncio
async def test_get_orderbook(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """L2 book levels are parsed and truncated to the requested depth."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json={
            "levels": [
                [{"px": "49990.0", "sz": "0.5"}, {"px": "49980.0", "sz": "1.0"}],
                [{"px": "50010.0", "sz": "0.3"}, {"px": "50020.0", "sz": "0.8"}],
            ]
        },
    )
    result = await client.get_orderbook("BTC", depth=2)
    assert result["asset"] == "BTC"
    assert len(result["bids"]) == 2
    assert len(result["asks"]) == 2
    assert result["bids"][0]["price"] == 49990.0
@pytest.mark.asyncio
async def test_get_positions(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Signed szi becomes size + direction; leverage dict is unwrapped."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=CLEARINGHOUSE_STATE,
    )
    positions = await client.get_positions()
    assert len(positions) == 1
    assert positions[0]["asset"] == "BTC"
    assert positions[0]["direction"] == "long"
    assert positions[0]["size"] == 0.1
    assert positions[0]["leverage"] == 10.0
@pytest.mark.asyncio
async def test_get_account_summary(httpx_mock: HTTPXMock, client: HyperliquidClient):
    # get_account_summary calls /info twice (perp + spot) — mock order matters.
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=CLEARINGHOUSE_STATE,
    )
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=SPOT_STATE,
    )
    result = await client.get_account_summary()
    assert result["perps_equity"] == 1500.0
    assert result["spot_usdc"] == 500.0
    # Unified account: equity = perps equity + spot USDC.
    assert result["equity"] == 2000.0
@pytest.mark.asyncio
async def test_get_trade_history(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """userFills entries are mapped to trade dicts with numeric fields."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {"coin": "BTC", "side": "B", "sz": "0.1", "px": "50000", "fee": "0.5", "time": 1000},
            {"coin": "ETH", "side": "A", "sz": "1.0", "px": "3000", "fee": "0.3", "time": 2000},
        ],
    )
    trades = await client.get_trade_history(limit=10)
    assert len(trades) == 2
    assert trades[0]["asset"] == "BTC"
    assert trades[0]["price"] == 50000.0
@pytest.mark.asyncio
async def test_get_open_orders(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """openOrders entries keep their oid and get parsed numeric fields."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {
                "oid": 12345,
                "coin": "BTC",
                "side": "B",
                "sz": "0.05",
                "limitPx": "49000",
                "orderType": "Limit",
            }
        ],
    )
    orders = await client.get_open_orders()
    assert len(orders) == 1
    assert orders[0]["oid"] == 12345
    assert orders[0]["asset"] == "BTC"
@pytest.mark.asyncio
async def test_get_historical(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """candleSnapshot entries are mapped to OHLCV dicts."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json=[
            {"t": 1000000, "o": "49000", "h": "51000", "l": "48500", "c": "50000", "v": "100"},
        ],
    )
    result = await client.get_historical("BTC", "2024-01-01", "2024-01-02", "1h")
    assert len(result["candles"]) == 1
    assert result["candles"][0]["close"] == 50000.0
@pytest.mark.asyncio
async def test_health_ok(httpx_mock: HTTPXMock, client: HyperliquidClient):
    """Health returns ok + testnet flag when /info responds."""
    httpx_mock.add_response(
        url=re.compile(r"https://api\.hyperliquid-testnet\.xyz/info"),
        json={"universe": []},
    )
    result = await client.health()
    assert result["status"] in ("ok", "healthy")
    assert result["testnet"] is True
@pytest.mark.asyncio
async def test_place_order_sdk_unavailable(client: HyperliquidClient):
    """place_order raises RuntimeError when SDK is not available (mocked)."""
    import mcp_hyperliquid.client as mod
    # Patch the module flag directly (no monkeypatch: restored in finally).
    original = mod._SDK_AVAILABLE
    mod._SDK_AVAILABLE = False
    client._exchange = None  # drop any cached Exchange so the flag is re-checked
    try:
        result = await client.place_order("BTC", "buy", 0.1, price=50000.0)
        # Should return error dict or raise RuntimeError
        assert "error" in result or result.get("status") == "error"
    except RuntimeError as exc:
        assert "not installed" in str(exc).lower() or "sdk" in str(exc).lower()
    finally:
        mod._SDK_AVAILABLE = original
@@ -1,50 +0,0 @@
from __future__ import annotations
from unittest.mock import MagicMock
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_common.environment import EnvironmentInfo
from mcp_hyperliquid.server import create_app
def _make_app(env_info, creds):
    """Build a test app around a dummy testnet client with core/observer tokens."""
    dummy_client = MagicMock()
    dummy_client.testnet = True
    tokens = {
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    }
    return create_app(
        client=dummy_client,
        token_store=TokenStore(tokens=tokens),
        creds=creds,
        env_info=env_info,
    )
def test_environment_info_full_shape():
    """environment_info echoes every EnvironmentInfo field plus max_leverage."""
    env = EnvironmentInfo(
        exchange="hyperliquid",
        environment="testnet",
        source="env",
        env_value="true",
        base_url="https://api.hyperliquid-testnet.xyz",
    )
    client = TestClient(_make_app(env, creds={"max_leverage": 3}))
    resp = client.post("/tools/environment_info", headers={"Authorization": "Bearer ot"})
    assert resp.status_code == 200
    expected = {
        "exchange": "hyperliquid",
        "environment": "testnet",
        "source": "env",
        "env_value": "true",
        "base_url": "https://api.hyperliquid-testnet.xyz",
        "max_leverage": 3,
    }
    body = resp.json()
    for key, value in expected.items():
        assert body[key] == value
def test_environment_info_requires_auth():
    """Without a bearer token the endpoint must answer 401."""
    env = EnvironmentInfo(
        exchange="hyperliquid", environment="testnet", source="default",
        env_value=None, base_url="https://api.hyperliquid-testnet.xyz",
    )
    client = TestClient(_make_app(env, creds={"max_leverage": 3}))
    assert client.post("/tools/environment_info").status_code == 401
@@ -1,50 +0,0 @@
from __future__ import annotations
import pytest
from fastapi import HTTPException
from mcp_hyperliquid.leverage_cap import enforce_leverage, get_max_leverage
def test_get_max_leverage_returns_creds_value():
    """max_leverage is read straight from the credentials mapping."""
    assert get_max_leverage({"max_leverage": 5}) == 5
def test_get_max_leverage_default_when_missing():
    """Defaults to 1 (cash only) when the secret carries no max_leverage."""
    assert get_max_leverage({}) == 1
def test_enforce_leverage_pass_under_cap():
    """Leverage strictly below the cap passes without raising."""
    enforce_leverage(2, creds={"max_leverage": 3}, exchange="hyperliquid")
def test_enforce_leverage_pass_at_cap():
    """Leverage exactly at the cap is still allowed (inclusive bound)."""
    enforce_leverage(3, creds={"max_leverage": 3}, exchange="hyperliquid")
def test_enforce_leverage_reject_over_cap():
    """Requests above the cap are rejected with a structured 403 detail."""
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(10, creds={"max_leverage": 3}, exchange="hyperliquid")
    assert exc.value.status_code == 403
    detail = exc.value.detail
    assert detail["error"] == "LEVERAGE_CAP_EXCEEDED"
    assert detail["exchange"] == "hyperliquid"
    assert detail["requested"] == 10
    assert detail["max"] == 3
def test_enforce_leverage_reject_when_below_one():
    """Leverage below 1 is invalid and rejected with 403."""
    with pytest.raises(HTTPException) as exc:
        enforce_leverage(0, creds={"max_leverage": 3}, exchange="hyperliquid")
    assert exc.value.status_code == 403
def test_enforce_leverage_default_when_none():
    """When requested is None, the cap itself is applied as the default."""
    applied = enforce_leverage(None, creds={"max_leverage": 3}, exchange="hyperliquid")
    assert applied == 3
@@ -1,211 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, MagicMock
import pytest
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_hyperliquid.server import create_app
@pytest.fixture
def mock_client():
    """Stub HyperliquidClient: every tool call resolves to a canned payload."""
    canned = {
        "get_markets": [{"asset": "BTC", "mark_price": 50000}],
        "get_ticker": {"asset": "BTC", "mark_price": 50000},
        "get_orderbook": {"bids": [], "asks": []},
        "get_positions": [],
        "get_account_summary": {"equity": 1500, "perps_equity": 1000},
        "get_trade_history": [],
        "get_historical": {"candles": []},
        "get_open_orders": [],
        "get_funding_rate": {"asset": "BTC", "current_funding_rate": 0.0001},
        "get_indicators": {"rsi": 55.0},
        "place_order": {"order_id": "x", "status": "ok"},
        "cancel_order": {"order_id": "x", "status": "ok"},
        "set_stop_loss": {"order_id": "x", "status": "ok"},
        "set_take_profit": {"order_id": "x", "status": "ok"},
        "close_position": {"status": "ok", "asset": "BTC"},
    }
    stub = MagicMock()
    for method, payload in canned.items():
        setattr(stub, method, AsyncMock(return_value=payload))
    return stub
@pytest.fixture
def http(mock_client):
    """TestClient over the app with core ('ct') and observer ('ot') tokens."""
    principals = {
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    }
    app = create_app(
        client=mock_client,
        token_store=TokenStore(tokens=principals),
        creds={"max_leverage": 3},
    )
    return TestClient(app)
# --- Health ---
def test_health(http):
    """Liveness endpoint needs no auth."""
    resp = http.get("/health")
    assert resp.status_code == 200
# --- Read tools: both core and observer allowed ---
def test_get_markets_core_ok(http):
    """Core token may read markets."""
    core = {"Authorization": "Bearer ct"}
    assert http.post("/tools/get_markets", headers=core, json={}).status_code == 200
def test_get_markets_observer_ok(http):
    """Observer token may read markets too."""
    observer = {"Authorization": "Bearer ot"}
    assert http.post("/tools/get_markets", headers=observer, json={}).status_code == 200
def test_get_ticker_core_ok(http):
    """Core can read a ticker; the canned mark price passes through."""
    resp = http.post(
        "/tools/get_ticker",
        json={"instrument": "BTC"},
        headers={"Authorization": "Bearer ct"},
    )
    assert resp.status_code == 200
    assert resp.json()["mark_price"] == 50000
def test_get_ticker_observer_ok(http):
    """Observer token may read a ticker."""
    resp = http.post(
        "/tools/get_ticker",
        json={"instrument": "BTC"},
        headers={"Authorization": "Bearer ot"},
    )
    assert resp.status_code == 200
def test_get_ticker_no_auth_401(http):
    """A missing bearer token yields 401."""
    assert http.post("/tools/get_ticker", json={"instrument": "BTC"}).status_code == 401
def test_get_account_summary_observer_ok(http):
    """Observer can read the account summary; canned equity passes through."""
    resp = http.post(
        "/tools/get_account_summary",
        json={},
        headers={"Authorization": "Bearer ot"},
    )
    assert resp.status_code == 200
    assert resp.json()["equity"] == 1500
def test_get_funding_rate_observer_ok(http):
    """Observer can read funding rates."""
    resp = http.post(
        "/tools/get_funding_rate",
        json={"instrument": "BTC"},
        headers={"Authorization": "Bearer ot"},
    )
    assert resp.status_code == 200
def test_get_positions_no_auth_401(http):
    """Positions are auth-protected: no token means 401."""
    assert http.post("/tools/get_positions", json={}).status_code == 401
# --- Write tools: core only ---
def test_place_order_core_ok(http):
    """Core may place an order: notional 0.003 * 50000 = 150 < CER-016 cap 200."""
    order = {"instrument": "BTC", "side": "buy", "amount": 0.003, "price": 50000}
    resp = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json=order,
    )
    assert resp.status_code == 200
def test_place_order_observer_forbidden(http):
    """Observer tokens cannot place orders (403)."""
    order = {"instrument": "BTC", "side": "buy", "amount": 0.001, "price": 50000}
    resp = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ot"},
        json=order,
    )
    assert resp.status_code == 403
def test_place_order_leverage_cap_enforced_hl(http):
    """Leverage above the secret-configured cap (default 3) is rejected."""
    payload = {
        "instrument": "BTC",
        "side": "buy",
        "amount": 0.001,
        "price": 50000,
        "leverage": 10,
    }
    resp = http.post(
        "/tools/place_order",
        headers={"Authorization": "Bearer ct"},
        json=payload,
    )
    assert resp.status_code == 403
    err = resp.json()["error"]
    assert err["code"] == "LEVERAGE_CAP_EXCEEDED"
    assert err["details"]["exchange"] == "hyperliquid"
def test_cancel_order_core_ok(http):
    """Core may cancel an order."""
    payload = {"order_id": "123", "instrument": "BTC"}
    resp = http.post(
        "/tools/cancel_order",
        headers={"Authorization": "Bearer ct"},
        json=payload,
    )
    assert resp.status_code == 200
def test_cancel_order_observer_forbidden(http):
    """Observer cannot cancel an order (403)."""
    payload = {"order_id": "123", "instrument": "BTC"}
    resp = http.post(
        "/tools/cancel_order",
        headers={"Authorization": "Bearer ot"},
        json=payload,
    )
    assert resp.status_code == 403
def test_set_stop_loss_core_ok(http):
    """Core may set a stop loss."""
    payload = {"instrument": "BTC", "stop_price": 45000.0, "size": 0.1}
    resp = http.post(
        "/tools/set_stop_loss",
        headers={"Authorization": "Bearer ct"},
        json=payload,
    )
    assert resp.status_code == 200
def test_set_stop_loss_observer_forbidden(http):
    """Observer cannot set a stop loss (403)."""
    payload = {"instrument": "BTC", "stop_price": 45000.0, "size": 0.1}
    resp = http.post(
        "/tools/set_stop_loss",
        headers={"Authorization": "Bearer ot"},
        json=payload,
    )
    assert resp.status_code == 403
def test_set_take_profit_observer_forbidden(http):
    """Observer cannot set a take profit (403)."""
    payload = {"instrument": "BTC", "tp_price": 55000.0, "size": 0.1}
    resp = http.post(
        "/tools/set_take_profit",
        headers={"Authorization": "Bearer ot"},
        json=payload,
    )
    assert resp.status_code == 403
def test_close_position_core_ok(http):
    """Core may close a position."""
    resp = http.post(
        "/tools/close_position",
        json={"instrument": "BTC"},
        headers={"Authorization": "Bearer ct"},
    )
    assert resp.status_code == 200
def test_close_position_observer_forbidden(http):
    """Observer cannot close a position (403)."""
    resp = http.post(
        "/tools/close_position",
        json={"instrument": "BTC"},
        headers={"Authorization": "Bearer ot"},
    )
    assert resp.status_code == 403
-27
View File
@@ -1,27 +0,0 @@
[project]
name = "mcp-macro"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
"mcp-common",
"fastapi>=0.115",
"uvicorn[standard]>=0.30",
"httpx>=0.27",
"pydantic>=2.6",
]
[project.optional-dependencies]
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_macro"]
[tool.uv.sources]
mcp-common = { workspace = true }
[project.scripts]
mcp-macro = "mcp_macro.__main__:main"
@@ -1,37 +0,0 @@
from __future__ import annotations
import json
import os
import uvicorn
from mcp_common.auth import load_token_store_from_files
from mcp_common.logging import configure_root_logging
from mcp_macro.server import create_app
configure_root_logging() # CER-P5-009
def main():
    """Entry point: load credentials and auth tokens from files, then serve.

    Environment:
        MACRO_CREDENTIALS_FILE: required; path to a JSON file that may carry
            fred_api_key / finnhub_api_key.
        CORE_TOKEN_FILE / OBSERVER_TOKEN_FILE: optional bearer-token files.
        HOST / PORT: bind address, defaulting to 0.0.0.0:9013.

    Raises:
        KeyError: if MACRO_CREDENTIALS_FILE is unset (fail-fast by design).
    """
    creds_file = os.environ["MACRO_CREDENTIALS_FILE"]
    with open(creds_file) as f:
        creds = json.load(f)
    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(
        fred_api_key=creds.get("fred_api_key", ""),
        finnhub_api_key=creds.get("finnhub_api_key", ""),
        token_store=token_store,
    )
    uvicorn.run(
        app,
        log_config=None,  # CER-P5-009: delegate to the root JSON logger
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9013")),
    )
if __name__ == "__main__":
main()
-91
View File
@@ -1,91 +0,0 @@
"""Pure-logic helpers per COT report parsing e analytics.
Niente HTTP qui — orchestrazione fetch sta in fetchers.py. Tutto testabile
in isolamento.
"""
from __future__ import annotations
from typing import Literal
ExtremeSignal = Literal["extreme_short", "extreme_long", "neutral"]
def compute_percentile(value: float, history: list[float]) -> float | None:
    """Percentile rank of *value* within *history* (0-100, inclusive).

    Returns None for an empty history; the result is clamped to [0, 100].
    """
    if not history:
        return None
    rank = 100.0 * sum(h <= value for h in history) / len(history)
    return min(100.0, max(0.0, rank))
def classify_extreme(percentile: float | None, threshold: float = 5.0) -> ExtremeSignal:
    """Classify a positioning percentile as an extreme or neutral.

    With the default threshold of 5: values <= 5 flag extreme_short and
    values >= 95 flag extreme_long. None (no data) is always neutral.
    """
    if percentile is None:
        return "neutral"
    is_short = percentile <= threshold
    is_long = percentile >= 100.0 - threshold
    if is_short:
        return "extreme_short"
    if is_long:
        return "extreme_long"
    return "neutral"
def _to_int(v) -> int:
try:
return int(float(v))
except (TypeError, ValueError):
return 0
def _date_only(s: str) -> str:
"""Estrae 'YYYY-MM-DD' da una data ISO con o senza timestamp."""
if not s:
return ""
return s.split("T", 1)[0]
def parse_tff_row(raw: dict) -> dict:
    """Map one Socrata TFF row to the API output schema (long/short/net per group)."""
    groups = {
        "dealer": ("dealer_positions_long_all", "dealer_positions_short_all"),
        "asset_mgr": ("asset_mgr_positions_long", "asset_mgr_positions_short"),
        "lev_funds": ("lev_money_positions_long", "lev_money_positions_short"),
        "other": ("other_rept_positions_long", "other_rept_positions_short"),
    }
    out: dict = {"report_date": _date_only(raw.get("report_date_as_yyyy_mm_dd", ""))}
    for prefix, (long_key, short_key) in groups.items():
        longs = _to_int(raw.get(long_key))
        shorts = _to_int(raw.get(short_key))
        out[f"{prefix}_long"] = longs
        out[f"{prefix}_short"] = shorts
        out[f"{prefix}_net"] = longs - shorts
    out["open_interest"] = _to_int(raw.get("open_interest_all"))
    return out
def parse_disagg_row(raw: dict) -> dict:
    """Map one Socrata Disaggregated F&O combined row to the API output schema."""
    groups = {
        "producer": ("prod_merc_positions_long_all", "prod_merc_positions_short_all"),
        "swap": ("swap_positions_long_all", "swap_positions_short_all"),
        "managed_money": ("m_money_positions_long_all", "m_money_positions_short_all"),
        "other": ("other_rept_positions_long_all", "other_rept_positions_short_all"),
    }
    out: dict = {"report_date": _date_only(raw.get("report_date_as_yyyy_mm_dd", ""))}
    for prefix, (long_key, short_key) in groups.items():
        longs = _to_int(raw.get(long_key))
        shorts = _to_int(raw.get(short_key))
        out[f"{prefix}_long"] = longs
        out[f"{prefix}_short"] = shorts
        out[f"{prefix}_net"] = longs - shorts
    out["open_interest"] = _to_int(raw.get("open_interest_all"))
    return out
@@ -1,36 +0,0 @@
"""Costanti CFTC: ticker → contract_market_code per TFF e Disaggregated.
I codici CFTC (`cftc_contract_market_code`) sono pubblici e stabili nel tempo.
Riferimento: https://www.cftc.gov/MarketReports/CommitmentsofTraders/
"""
from __future__ import annotations
CFTC_BASE_URL = "https://publicreporting.cftc.gov/resource"
TFF_DATASET_ID = "gpe5-46if"
DISAGG_DATASET_ID = "72hh-3qpy"
# TFF: equity/financial. Mapping ticker → cftc_contract_market_code.
SYMBOL_TO_CFTC_CODE_TFF: dict[str, str] = {
"ES": "13874A", # E-mini S&P 500
"NQ": "209742", # E-mini Nasdaq-100
"RTY": "239742", # E-mini Russell 2000
"ZN": "043602", # 10-Year T-Note
"ZB": "020601", # 30-Year T-Bond
"6E": "099741", # Euro FX
"6J": "097741", # Japanese Yen
"DX": "098662", # US Dollar Index
}
# Disaggregated: commodities.
SYMBOL_TO_CFTC_CODE_DISAGG: dict[str, str] = {
"CL": "067651", # Crude Oil WTI
"GC": "088691", # Gold
"SI": "084691", # Silver
"HG": "085692", # Copper
"ZW": "001602", # Wheat
"ZC": "002602", # Corn
"ZS": "005602", # Soybeans
}
ALL_TFF_SYMBOLS: list[str] = list(SYMBOL_TO_CFTC_CODE_TFF.keys())
ALL_DISAGG_SYMBOLS: list[str] = list(SYMBOL_TO_CFTC_CODE_DISAGG.keys())
@@ -1,771 +0,0 @@
from __future__ import annotations
from datetime import UTC, datetime, timedelta
from typing import Any
import httpx
from mcp_common.http import async_client
from mcp_macro.cot import classify_extreme, compute_percentile, parse_disagg_row, parse_tff_row
from mcp_macro.cot_contracts import (
ALL_DISAGG_SYMBOLS,
ALL_TFF_SYMBOLS,
CFTC_BASE_URL,
DISAGG_DATASET_ID,
SYMBOL_TO_CFTC_CODE_DISAGG,
SYMBOL_TO_CFTC_CODE_TFF,
TFF_DATASET_ID,
)
FRED_BASE = "https://api.stlouisfed.org/fred/series/observations"
FINNHUB_CALENDAR = "https://finnhub.io/api/v1/calendar/economic"
COINGECKO_GLOBAL = "https://api.coingecko.com/api/v3/global"
COINGECKO_SIMPLE = "https://api.coingecko.com/api/v3/simple/price"
DERIBIT_DVOL = "https://www.deribit.com/api/v2/public/get_volatility_index_data"
YAHOO_CHART = "https://query1.finance.yahoo.com/v8/finance/chart/{symbol}"
ASSET_TICKER_MAP: dict[str, tuple[str, str]] = {
"WTI": ("CL=F", "WTI Crude Oil"),
"BRENT": ("BZ=F", "Brent Crude Oil"),
"GOLD": ("GC=F", "Gold Futures"),
"SILVER": ("SI=F", "Silver Futures"),
"COPPER": ("HG=F", "Copper Futures"),
"NATGAS": ("NG=F", "Natural Gas"),
"DXY": ("DX-Y.NYB", "US Dollar Index"),
"SPX": ("^GSPC", "S&P 500"),
"NDX": ("^NDX", "Nasdaq 100"),
"DJI": ("^DJI", "Dow Jones"),
"RUT": ("^RUT", "Russell 2000"),
"VIX": ("^VIX", "CBOE Volatility Index"),
"US5Y": ("^FVX", "US 5-Year Treasury"),
"US10Y": ("^TNX", "US 10-Year Treasury"),
"US30Y": ("^TYX", "US 30-Year Treasury"),
"US2Y": ("^UST2YR", "US 2-Year Treasury"),
"EURUSD": ("EURUSD=X", "EUR/USD"),
"USDJPY": ("JPY=X", "USD/JPY"),
"GBPUSD": ("GBPUSD=X", "GBP/USD"),
"BTCUSD": ("BTC-USD", "Bitcoin/USD"),
"ETHUSD": ("ETH-USD", "Ethereum/USD"),
"ES": ("ES=F", "E-mini S&P 500 Futures"),
"NQ": ("NQ=F", "E-mini Nasdaq 100 Futures"),
"YM": ("YM=F", "E-mini Dow Futures"),
"RTY": ("RTY=F", "E-mini Russell 2000 Futures"),
}
_ASSET_CACHE: dict[str, dict] = {}
_ASSET_CACHE_TTL = 60.0
async def _fetch_yahoo_meta(client: httpx.AsyncClient, symbol: str, range_: str = "10d") -> dict:
    """Fetch Yahoo Finance chart metadata plus daily closes for *symbol*.

    Returns {"meta": <chart meta dict>, "closes": [float, ...]} with None
    closes filtered out, or {} on any HTTP/parse failure (best-effort feed).
    """
    try:
        resp = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": range_},
            # Browser-like UA; presumably required by Yahoo's endpoint — confirm.
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if resp.status_code != 200:
            return {}
        result = (resp.json().get("chart") or {}).get("result") or []
        if not result:
            return {}
        r0 = result[0]
        meta = r0.get("meta") or {}
        # Daily close series lives under indicators.quote[0].close; gaps are None.
        closes = ((r0.get("indicators") or {}).get("quote") or [{}])[0].get("close") or []
        closes = [c for c in closes if c is not None]
        return {"meta": meta, "closes": closes}
    except Exception:
        # Deliberate best-effort: any network/JSON error degrades to empty data.
        return {}
async def fetch_asset_price(ticker: str) -> dict[str, Any]:
    """Current price + 24h/7d percentage change for a watchlist ticker.

    Results are cached per ticker for _ASSET_CACHE_TTL seconds. Unknown
    tickers return {"ticker": ..., "error": ...} instead of raising.
    """
    import time
    key = ticker.upper()
    now = time.monotonic()
    cached = _ASSET_CACHE.get(key)
    if cached and (now - cached["ts"]) < _ASSET_CACHE_TTL:
        return cached["data"]
    mapping = ASSET_TICKER_MAP.get(key)
    if not mapping:
        return {"ticker": ticker, "error": f"unknown ticker {ticker}"}
    symbol, name = mapping
    async with async_client(timeout=10.0) as client:
        info = await _fetch_yahoo_meta(client, symbol, "10d")
    meta = info.get("meta") or {}
    closes = info.get("closes") or []
    price = meta.get("regularMarketPrice")
    prev_close = meta.get("previousClose")
    change_24h_pct = None
    if price is not None and prev_close:
        try:
            change_24h_pct = round((float(price) - float(prev_close)) / float(prev_close) * 100, 3)
        except Exception:
            change_24h_pct = None
    change_7d_pct = None
    # closes[-6] is the close ~5 trading days back; needs >= 6 daily closes.
    if len(closes) >= 6 and price is not None:
        try:
            change_7d_pct = round((float(price) - float(closes[-6])) / float(closes[-6]) * 100, 3)
        except Exception:
            change_7d_pct = None
    out = {
        "ticker": key,
        "name": name,
        "price": float(price) if price is not None else None,
        "change_24h_pct": change_24h_pct,
        "change_7d_pct": change_7d_pct,
        "source": f"yfinance:{symbol}",
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _ASSET_CACHE[key] = {"data": out, "ts": now}
    return out
_TREASURY_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
_TREASURY_TTL = 300.0
async def fetch_treasury_yields() -> dict[str, Any]:
    """US treasury yields (2/5/10/30Y) via Yahoo, with 2s10s spread and shape.

    Cached for _TREASURY_TTL seconds. Missing quotes come back as None and
    simply disable the spread/shape computation.
    """
    import time
    now = time.monotonic()
    if _TREASURY_CACHE["data"] and (now - _TREASURY_CACHE["ts"]) < _TREASURY_TTL:
        return _TREASURY_CACHE["data"]
    symbols = [
        ("us2y", "^UST2YR"),
        ("us5y", "^FVX"),
        ("us10y", "^TNX"),
        ("us30y", "^TYX"),
    ]
    yields: dict[str, float | None] = {}
    async with async_client(timeout=10.0) as client:
        for key, sym in symbols:
            info = await _fetch_yahoo_meta(client, sym, "5d")
            meta = info.get("meta") or {}
            price = meta.get("regularMarketPrice")
            yields[key] = float(price) if price is not None else None
    spread = None
    if yields.get("us10y") is not None and yields.get("us2y") is not None:
        spread = round(yields["us10y"] - yields["us2y"], 3)
    # Shape thresholds: > 0.25 normal, < -0.1 inverted, in between flat.
    shape = "unknown"
    if spread is not None:
        if spread > 0.25:
            shape = "normal"
        elif spread < -0.1:
            shape = "inverted"
        else:
            shape = "flat"
    out = {
        "yields": yields,
        "spread_2y10y": spread,
        "yield_curve_shape": shape,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _TREASURY_CACHE["data"] = out
    _TREASURY_CACHE["ts"] = now
    return out
def yield_curve_metrics(yields: dict[str, float | None]) -> dict[str, Any]:
    """Slope and butterfly-convexity metrics from a 2/5/10/30Y yield snapshot.

    butterfly_2_10_30 = 2*us10y - us2y - us30y (>0 means a concave belly).
    A metric whose inputs are missing comes back None; the regime label is
    derived from the 2s10s slope alone.
    """
    y2, y5, y10, y30 = (yields.get(k) for k in ("us2y", "us5y", "us10y", "us30y"))
    slope_2y10y = None if y2 is None or y10 is None else y10 - y2
    slope_5y30y = None if y5 is None or y30 is None else y30 - y5
    butterfly = None
    if y2 is not None and y10 is not None and y30 is not None:
        butterfly = 2 * y10 - y2 - y30
    if slope_2y10y is None:
        regime = "unknown"
    elif slope_2y10y >= 0.5:
        regime = "steep"
    elif slope_2y10y > 0.1:
        regime = "normal"
    elif slope_2y10y > -0.1:
        regime = "flat"
    else:
        regime = "inverted"

    def _maybe_round(x):
        return None if x is None else round(x, 3)

    return {
        "slope_2y10y": _maybe_round(slope_2y10y),
        "slope_5y30y": _maybe_round(slope_5y30y),
        "butterfly_2_10_30": _maybe_round(butterfly),
        "regime": regime,
    }
async def fetch_yield_curve_slope() -> dict[str, Any]:
    """Slope/convexity metrics computed over the current treasury yields."""
    base = await fetch_treasury_yields()
    curve = yield_curve_metrics(base.get("yields") or {})
    result: dict[str, Any] = {"yields": base.get("yields")}
    result.update(curve)
    result["data_timestamp"] = datetime.now(UTC).isoformat()
    return result
async def fetch_breakeven_inflation(fred_api_key: str = "") -> dict[str, Any]:
    """Breakeven inflation rate via FRED:
    - T10YIE (10Y breakeven, market expectation 10Y inflation)
    - T5YIE (5Y breakeven)
    - T5YIFR (5Y forward 5Y forward inflation expectation)

    Returns {"breakevens": {...}, "interpretation": ..., "data_timestamp": ...};
    without an API key, {"error": ..., "breakevens": {}}.
    """
    if not fred_api_key:
        return {"error": "No FRED API key configured", "breakevens": {}}
    series_map = {
        "be_5y": "T5YIE",
        "be_10y": "T10YIE",
        "be_5y5y_forward": "T5YIFR",
    }
    out: dict[str, float | None] = {}
    async with async_client(timeout=10.0) as client:
        for name, series_id in series_map.items():
            resp = await client.get(
                FRED_BASE,
                params={
                    "series_id": series_id,
                    "api_key": fred_api_key,
                    "file_type": "json",
                    "sort_order": "desc",  # newest observation first
                    "limit": 1,
                },
            )
            data = resp.json()
            obs = data.get("observations", [])
            # FRED encodes missing observations as "." — map those to None.
            try:
                out[name] = float(obs[0]["value"]) if obs and obs[0]["value"] != "." else None
            except (ValueError, IndexError, KeyError):
                out[name] = None
    # Interpretation buckets on the 10Y breakeven: >3% high, <1.5% low, else anchored.
    interpretation = "unknown"
    be10 = out.get("be_10y")
    if be10 is not None:
        if be10 > 3.0:
            interpretation = "high_inflation_expected"
        elif be10 < 1.5:
            interpretation = "low_inflation_expected"
        else:
            interpretation = "anchored"
    return {
        "breakevens": out,
        "interpretation": interpretation,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
async def fetch_equity_futures() -> dict[str, Any]:
    """Fetch ES/NQ/YM/RTY futures with US cash-session detection.

    Session is derived from UTC wall clock: weekdays 13-20 UTC count as the
    regular cash session; otherwise pre-market / after-hours / weekend.
    """
    tickers = [("es", "ES=F"), ("nq", "NQ=F"), ("ym", "YM=F"), ("rty", "RTY=F")]
    now = datetime.now(UTC)
    weekday = now.weekday()  # 0=Mon
    hour_utc = now.hour
    # NOTE(review): 13:00-20:00 UTC approximates 9:30-16:00 ET only under EDT
    # and ignores the half hour — confirm whether DST drift is acceptable here.
    cash_open = (weekday < 5) and (13 <= hour_utc < 20)
    if cash_open:
        session = "regular"
    elif weekday >= 5:
        session = "weekend"
    elif hour_utc < 13:
        session = "pre-market"
    else:
        session = "after-hours"
    out: dict[str, Any] = {}
    async with async_client(timeout=10.0) as client:
        for key, sym in tickers:
            info = await _fetch_yahoo_meta(client, sym, "5d")
            meta = info.get("meta") or {}
            price = meta.get("regularMarketPrice")
            prev = meta.get("previousClose") or meta.get("chartPreviousClose")
            change_pct = None
            if price is not None and prev:
                try:
                    change_pct = round((float(price) - float(prev)) / float(prev) * 100, 3)
                except Exception:
                    change_pct = None
            out[key] = {
                "price": float(price) if price is not None else None,
                "change_pct": change_pct,
                "session": session,
            }
    next_open = None
    if weekday < 5 and hour_utc < 13:
        next_open = now.replace(hour=13, minute=30, second=0, microsecond=0).isoformat()
    else:
        # NOTE(review): on a Friday after hours this yields Saturday 13:30 as
        # next_open, which is not a trading day — confirm intended semantics.
        days_ahead = (7 - weekday) if weekday >= 5 else 1
        nd = (now.replace(hour=13, minute=30, second=0, microsecond=0) + timedelta(days=days_ahead))
        next_open = nd.isoformat()
    return {
        "futures": out,
        "session_status": {
            "cash_open": cash_open,
            "session": session,
            "next_open_utc": next_open,
        },
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
_MARKET_CACHE: dict[str, Any] = {"data": None, "ts": 0.0}
_MARKET_CACHE_TTL = 120.0
async def _fetch_yahoo_price(client: httpx.AsyncClient, symbol: str) -> float | None:
    """Latest regular-market price for *symbol* from Yahoo, or None on failure."""
    try:
        resp = await client.get(
            YAHOO_CHART.format(symbol=symbol),
            params={"interval": "1d", "range": "5d"},
            # Browser-like UA; presumably required by Yahoo's endpoint — confirm.
            headers={"User-Agent": "Mozilla/5.0"},
        )
        if resp.status_code != 200:
            return None
        result = (resp.json().get("chart") or {}).get("result") or []
        if not result:
            return None
        price = (result[0].get("meta") or {}).get("regularMarketPrice")
        return float(price) if price is not None else None
    except Exception:
        # Deliberate best-effort: degrade to None on any network/JSON error.
        return None
async def _fetch_dvol_latest(client: httpx.AsyncClient, currency: str) -> float | None:
    """Latest Deribit DVOL index value for *currency* over the last 7 days.

    Returns None when the feed is empty or any error occurs (best-effort).
    """
    now_ms = int(datetime.now(UTC).timestamp() * 1000)
    start_ms = now_ms - 7 * 24 * 3600 * 1000  # 7-day lookback window, in ms
    try:
        resp = await client.get(
            DERIBIT_DVOL,
            params={
                "currency": currency,
                "start_timestamp": start_ms,
                "end_timestamp": now_ms,
                "resolution": "1D",
            },
        )
        rows = (resp.json().get("result") or {}).get("data") or []
        if not rows:
            return None
        # Last row, column 4 — presumably the candle's close value; confirm
        # against Deribit's get_volatility_index_data response schema.
        return float(rows[-1][4])
    except Exception:
        return None
async def fetch_economic_indicators(
fred_api_key: str = "",
indicators: list[str] | None = None,
) -> dict[str, Any]:
series_map = {
"fed_rate": "FEDFUNDS",
"cpi": "CPIAUCSL",
"unemployment": "UNRATE",
"us10y_yield": "DGS10",
}
result: dict[str, Any] = {}
if not fred_api_key:
return {"indicators": result, "error": "No FRED API key configured"}
async with async_client(timeout=10.0) as client:
for name, series_id in series_map.items():
if indicators and name not in indicators:
continue
resp = await client.get(
FRED_BASE,
params={
"series_id": series_id,
"api_key": fred_api_key,
"file_type": "json",
"sort_order": "desc",
"limit": 1,
},
)
data = resp.json()
obs = data.get("observations", [])
result[name] = float(obs[0]["value"]) if obs else None
result["updated_at"] = datetime.now(UTC).isoformat()
return result
CURRENCY_TO_COUNTRY = {
"USD": ("US", "United States"),
"EUR": ("EU", "Euro Area"),
"JPY": ("JP", "Japan"),
"GBP": ("UK", "United Kingdom"),
"CAD": ("CA", "Canada"),
"AUD": ("AU", "Australia"),
"NZD": ("NZ", "New Zealand"),
"CHF": ("CH", "Switzerland"),
"CNY": ("CN", "China"),
}
_HIGH_IMPACT_EVENTS = (
"fomc", "fed", "cpi", "nfp", "non-farm", "nonfarm", "ppi",
"ecb", "boj", "boe", "gdp", "unemployment rate",
)
def _market_impact_historical(name: str) -> str:
    """Tag events whose releases match the high-impact keyword list."""
    lowered = (name or "").lower()
    if any(keyword in lowered for keyword in _HIGH_IMPACT_EVENTS):
        return "high_vol_spike"
    return "normal"
async def fetch_macro_calendar(
    finnhub_api_key: str = "",
    days_ahead: int = 7,
    country_filter: list[str] | None = None,
    importance_min: str | None = None,
    start: str | None = None,
    end: str | None = None,
) -> dict[str, Any]:
    """Fetch the economic calendar with country/importance/date-range filters.

    Sources, in order: the free Forex Factory weekly feed, then Finnhub as a
    fallback when a key is configured and Forex Factory produced no events.
    Returns {"events": [...]}, optionally with "error" or "note" keys.
    """
    events: list[dict[str, Any]] = []
    importance_order = {"low": 0, "medium": 1, "high": 2}
    # Unknown importance_min strings degrade to 0 (i.e. no filtering).
    min_level = importance_order.get(
        (importance_min or "").lower(), 0
    ) if importance_min else 0
    start_dt: datetime | None = None
    end_dt: datetime | None = None
    # Accept full ISO timestamps first, then bare YYYY-MM-DD dates.
    if start:
        try:
            start_dt = datetime.fromisoformat(start).replace(tzinfo=UTC)
        except ValueError:
            start_dt = datetime.strptime(start, "%Y-%m-%d").replace(tzinfo=UTC)
    if end:
        try:
            end_dt = datetime.fromisoformat(end).replace(tzinfo=UTC)
        except ValueError:
            end_dt = datetime.strptime(end, "%Y-%m-%d").replace(tzinfo=UTC)
    country_filter_set = (
        {c.upper() for c in country_filter} if country_filter else None
    )
    # Try Forex Factory free feed first
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get("https://nfs.faireconomy.media/ff_calendar_thisweek.json")
            if resp.status_code == 200:
                raw = resp.json()
                now = datetime.now(UTC)
                for e in raw:
                    date_str = e.get("date", "")
                    event_dt: datetime | None = None
                    try:
                        event_dt = datetime.fromisoformat(date_str.replace("Z", "+00:00"))
                        # Drop events already in the past.
                        if event_dt < now:
                            continue
                    except (ValueError, TypeError):
                        # Unparseable date: keep the event, without a datetime.
                        pass
                    # The FF feed's "country" field is actually a currency code.
                    currency = (e.get("country", "") or "").upper()
                    country_code, country_name = CURRENCY_TO_COUNTRY.get(
                        currency, (currency or "", e.get("country", "") or "")
                    )
                    if country_filter_set and country_code not in country_filter_set:
                        continue
                    impact = (e.get("impact", "") or "").lower()
                    importance = (
                        "high" if impact == "high" else "medium" if impact == "medium" else "low"
                    )
                    if importance_order[importance] < min_level:
                        continue
                    if start_dt and event_dt and event_dt < start_dt:
                        continue
                    if end_dt and event_dt and event_dt > end_dt:
                        continue
                    name = e.get("title", "")
                    events.append(
                        {
                            "date": date_str,
                            "datetime_utc": event_dt.isoformat() if event_dt else date_str,
                            "name": name,
                            "event": name,
                            "country": country_name,
                            "country_code": country_code,
                            "importance": importance,
                            "forecast": e.get("forecast", ""),
                            "previous": e.get("previous", ""),
                            "actual": e.get("actual"),
                            "market_impact_historical": _market_impact_historical(name),
                        }
                    )
    except Exception:
        # Best-effort: a failed primary source simply falls through to Finnhub.
        pass
    # Fallback to Finnhub if we have a key and no events
    if not events and finnhub_api_key:
        try:
            now = datetime.now(UTC)
            end_default = now + timedelta(days=days_ahead)
            async with async_client(timeout=10.0) as client:
                resp = await client.get(
                    FINNHUB_CALENDAR,
                    params={
                        "from": (start_dt or now).strftime("%Y-%m-%d"),
                        "to": (end_dt or end_default).strftime("%Y-%m-%d"),
                        "token": finnhub_api_key,
                    },
                )
                data = resp.json()
                if isinstance(data, dict) and "error" in data:
                    return {"events": [], "error": data["error"]}
                raw = data if isinstance(data, list) else data.get("economicCalendar", [])
                for e in raw:
                    # Finnhub may report importance as an int level or a string.
                    importance_raw = (
                        e.get("importance")
                        or e.get("impact")
                        or "medium"
                    )
                    if isinstance(importance_raw, int):
                        importance = (
                            "high" if importance_raw >= 3 else
                            "medium" if importance_raw >= 2 else
                            "low"
                        )
                    else:
                        importance = str(importance_raw).lower()
                        if importance not in ("low", "medium", "high"):
                            importance = "medium"
                    if importance_order[importance] < min_level:
                        continue
                    country_code = (e.get("country", "") or "").upper()
                    country_name = CURRENCY_TO_COUNTRY.get(
                        country_code, (country_code, country_code)
                    )[1]
                    if country_filter_set and country_code not in country_filter_set:
                        continue
                    name = e.get("event", "")
                    date_str = e.get("date", e.get("time", ""))
                    events.append({
                        "date": date_str,
                        "datetime_utc": date_str,
                        "name": name,
                        "event": name,
                        "country": country_name,
                        "country_code": country_code,
                        "importance": importance,
                        "forecast": e.get("forecast", ""),
                        "previous": e.get("previous", e.get("prev", "")),
                        "actual": e.get("actual"),
                        "market_impact_historical": _market_impact_historical(name),
                    })
        except Exception:
            pass
    if not events:
        return {"events": [], "note": "No calendar source available"}
    return {"events": events}
async def fetch_market_overview() -> dict[str, Any]:
    """Cross-market snapshot: crypto caps/prices, DVOL, S&P 500, gold, VIX.

    Aggregates CoinGecko, Deribit and Yahoo data; each source degrades
    independently to None/{} on failure. Cached for _MARKET_CACHE_TTL seconds.
    """
    import time
    now = time.monotonic()
    if _MARKET_CACHE["data"] is not None and (now - _MARKET_CACHE["ts"]) < _MARKET_CACHE_TTL:
        return _MARKET_CACHE["data"]
    async with async_client(timeout=10.0) as client:
        global_data: dict[str, Any] = {}
        prices: dict[str, Any] = {}
        try:
            global_resp = await client.get(COINGECKO_GLOBAL)
            global_data = global_resp.json().get("data", {}) or {}
        except Exception:
            global_data = {}
        try:
            price_resp = await client.get(
                COINGECKO_SIMPLE,
                params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
            )
            prices = price_resp.json() or {}
        except Exception:
            prices = {}
        # Helper fetchers below are themselves best-effort (return None on error).
        dvol_btc = await _fetch_dvol_latest(client, "BTC")
        dvol_eth = await _fetch_dvol_latest(client, "ETH")
        sp500 = await _fetch_yahoo_price(client, "^GSPC")
        gold = await _fetch_yahoo_price(client, "GC=F")
        vix = await _fetch_yahoo_price(client, "^VIX")
    out = {
        "btc_dominance": global_data.get("market_cap_percentage", {}).get("btc"),
        "total_market_cap": global_data.get("total_market_cap", {}).get("usd"),
        "btc_price": prices.get("bitcoin", {}).get("usd"),
        "eth_price": prices.get("ethereum", {}).get("usd"),
        "sp500": sp500,
        "gold": gold,
        "vix": vix,
        "dvol_btc": dvol_btc,
        "dvol_eth": dvol_eth,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _MARKET_CACHE["data"] = out
    _MARKET_CACHE["ts"] = now
    return out
_COT_TTL = 3600.0 # 1h
_COT_CACHE: dict[tuple[str, str, int], dict[str, Any]] = {}
_COT_CACHE_TS: dict[tuple[str, str, int], float] = {}
async def fetch_cot_tff(symbol: str, lookback_weeks: int = 52) -> dict[str, Any]:
    """Fetch the COT TFF report for an equity/financial symbol; rows ASC by date.

    Unknown symbols return {"error": "unknown_symbol", "available": [...]}.
    Results are cached per (symbol, report, lookback) for _COT_TTL seconds.
    """
    import time
    symbol = symbol.upper()
    if symbol not in SYMBOL_TO_CFTC_CODE_TFF:
        return {"error": "unknown_symbol", "available": ALL_TFF_SYMBOLS}
    key = (symbol, "tff", lookback_weeks)
    now = time.monotonic()
    if key in _COT_CACHE and (now - _COT_CACHE_TS[key]) < _COT_TTL:
        return _COT_CACHE[key]
    code = SYMBOL_TO_CFTC_CODE_TFF[symbol]
    url = f"{CFTC_BASE_URL}/{TFF_DATASET_ID}.json"
    async with async_client(timeout=10.0) as client:
        resp = await client.get(
            url,
            params={
                # Socrata/SODA query: newest first, one row per report week.
                "cftc_contract_market_code": code,
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": str(lookback_weeks),
            },
        )
    if resp.status_code != 200:
        return {"symbol": symbol, "report_type": "tff", "rows": [], "error": "cftc_unavailable"}
    raw_rows = resp.json() or []
    parsed = [parse_tff_row(r) for r in raw_rows]
    parsed.sort(key=lambda r: r["report_date"])  # ASC by date
    out = {
        "symbol": symbol,
        "report_type": "tff",
        "rows": parsed,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
    _COT_CACHE[key] = out
    _COT_CACHE_TS[key] = now
    return out
async def fetch_cot_disaggregated(symbol: str, lookback_weeks: int = 52) -> dict[str, Any]:
    """Fetch the CFTC Disaggregated COT report for a commodity symbol.

    Rows are returned oldest-first (ASC by report_date). Successful results
    are cached per (symbol, "disaggregated", lookback_weeks) for _COT_TTL seconds.
    """
    import time
    symbol = symbol.upper()
    if symbol not in SYMBOL_TO_CFTC_CODE_DISAGG:
        return {"error": "unknown_symbol", "available": ALL_DISAGG_SYMBOLS}
    key = (symbol, "disaggregated", lookback_weeks)
    now = time.monotonic()
    if key in _COT_CACHE and (now - _COT_CACHE_TS[key]) < _COT_TTL:
        return _COT_CACHE[key]
    code = SYMBOL_TO_CFTC_CODE_DISAGG[symbol]
    url = f"{CFTC_BASE_URL}/{DISAGG_DATASET_ID}.json"
    async with async_client(timeout=10.0) as client:
        # Socrata SoQL: newest rows first, capped at lookback_weeks entries.
        resp = await client.get(
            url,
            params={
                "cftc_contract_market_code": code,
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": str(lookback_weeks),
            },
        )
        if resp.status_code != 200:
            # Upstream failure is not cached so the next call retries.
            return {"symbol": symbol, "report_type": "disaggregated", "rows": [], "error": "cftc_unavailable"}
        raw_rows = resp.json() or []
        parsed = [parse_disagg_row(r) for r in raw_rows]
        parsed.sort(key=lambda r: r["report_date"])  # ASC by date
        out = {
            "symbol": symbol,
            "report_type": "disaggregated",
            "rows": parsed,
            "data_timestamp": datetime.now(UTC).isoformat(),
        }
        _COT_CACHE[key] = out
        _COT_CACHE_TS[key] = now
        return out
def _collect_cot_extremes(
    results: list[Any],
    *,
    net_key: str,
    report_type: str,
    key_role: str,
) -> list[dict[str, Any]]:
    """Turn a list of gathered COT fetch results into extreme-positioning entries.

    Skips failed fetches (exceptions or non-dict payloads) and series with
    fewer than 4 weekly rows, which are too thin for a meaningful percentile.
    """
    entries: list[dict[str, Any]] = []
    for res in results:
        if isinstance(res, BaseException) or not isinstance(res, dict):
            continue  # fetch raised or returned an unexpected payload shape
        rows = res.get("rows") or []
        if len(rows) < 4:
            continue
        series = [r[net_key] for r in rows]
        current = series[-1]
        history = series[:-1]
        pct = compute_percentile(current, history)
        entries.append({
            "symbol": res["symbol"],
            "report_type": report_type,
            "key_role": key_role,
            "current_net": current,
            "percentile": pct,
            "signal": classify_extreme(pct),
            "report_date": rows[-1]["report_date"],
        })
    return entries


async def fetch_cot_extreme_positioning(lookback_weeks: int = 156) -> dict[str, Any]:
    """Scanner for extreme positioning (percentile <=5 or >=95) on the watchlist symbols.

    TFF -> key_role = lev_funds (lev_funds_net).
    Disaggregated -> key_role = managed_money (managed_money_net).
    """
    import asyncio
    # Call the fetchers through their module-level names on purpose:
    # tests monkeypatch fetch_cot_tff / fetch_cot_disaggregated on this module.
    tff_tasks = [fetch_cot_tff(s, lookback_weeks) for s in ALL_TFF_SYMBOLS]
    disagg_tasks = [fetch_cot_disaggregated(s, lookback_weeks) for s in ALL_DISAGG_SYMBOLS]
    tff_results, disagg_results = await asyncio.gather(
        asyncio.gather(*tff_tasks, return_exceptions=True),
        asyncio.gather(*disagg_tasks, return_exceptions=True),
    )
    extremes = _collect_cot_extremes(
        tff_results, net_key="lev_funds_net", report_type="tff", key_role="lev_funds"
    )
    extremes += _collect_cot_extremes(
        disagg_results,
        net_key="managed_money_net",
        report_type="disaggregated",
        key_role="managed_money",
    )
    return {
        "lookback_weeks": lookback_weeks,
        "extremes": extremes,
        "data_timestamp": datetime.now(UTC).isoformat(),
    }
-203
View File
@@ -1,203 +0,0 @@
from __future__ import annotations
import os
from fastapi import Depends, FastAPI, HTTPException
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel, Field
from mcp_macro.fetchers import (
fetch_asset_price,
fetch_breakeven_inflation,
fetch_cot_disaggregated,
fetch_cot_extreme_positioning,
fetch_cot_tff,
fetch_economic_indicators,
fetch_equity_futures,
fetch_macro_calendar,
fetch_market_overview,
fetch_treasury_yields,
fetch_yield_curve_slope,
)
# --- Body models ---
class GetEconomicIndicatorsReq(BaseModel):
    """Body for /tools/get_economic_indicators; None fetches all indicators."""
    indicators: list[str] | None = None
class GetMacroCalendarReq(BaseModel):
    """Body for /tools/get_macro_calendar; all filters are optional."""
    days: int = 7
    country_filter: list[str] | None = None
    importance_min: str | None = None
    start: str | None = None
    end: str | None = None
class GetMarketOverviewReq(BaseModel):
    """Empty body for /tools/get_market_overview."""
    pass
class GetAssetPriceReq(BaseModel):
    """Body for /tools/get_asset_price; a single ticker symbol."""
    ticker: str
class GetTreasuryYieldsReq(BaseModel):
    """Empty body for /tools/get_treasury_yields."""
    pass
class GetEquityFuturesReq(BaseModel):
    """Empty body for /tools/get_equity_futures."""
    pass
class GetYieldCurveSlopeReq(BaseModel):
    """Empty body for /tools/get_yield_curve_slope."""
    pass
class GetBreakevenInflationReq(BaseModel):
    """Empty body for /tools/get_breakeven_inflation."""
    pass
class GetCotTffReq(BaseModel):
    """Body for /tools/get_cot_tff; lookback bounded to 4..520 weeks."""
    symbol: str
    lookback_weeks: int = Field(default=52, ge=4, le=520)
class GetCotDisaggregatedReq(BaseModel):
    """Body for /tools/get_cot_disaggregated; lookback bounded to 4..520 weeks."""
    symbol: str
    lookback_weeks: int = Field(default=52, ge=4, le=520)
class GetCotExtremeReq(BaseModel):
    """Body for /tools/get_cot_extreme_positioning; lookback bounded to 4..520 weeks."""
    lookback_weeks: int = Field(default=156, ge=4, le=520)
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Raise HTTP 403 unless the principal holds at least one allowed capability."""
    allowed: set[str] = {
        name for name, enabled in (("core", core), ("observer", observer)) if enabled
    }
    if principal.capabilities.isdisjoint(allowed):
        raise HTTPException(403, f"capability required: {allowed}")
# --- App factory ---
def create_app(*, fred_api_key: str = "", finnhub_api_key: str = "", token_store: TokenStore) -> FastAPI:
    """Build the mcp-macro FastAPI app.

    Registers one read-only POST route per tool under /tools/*, each gated by
    bearer auth (require_principal) and a core/observer capability check, then
    mounts the /mcp bridge that proxies MCP tool calls back to those routes.
    """
    app = build_app(name="mcp-macro", version="0.1.0", token_store=token_store)
    @app.post("/tools/get_economic_indicators", tags=["reads"])
    async def t_get_economic_indicators(
        body: GetEconomicIndicatorsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_economic_indicators(
            fred_api_key=fred_api_key, indicators=body.indicators
        )
    @app.post("/tools/get_macro_calendar", tags=["reads"])
    async def t_get_macro_calendar(
        body: GetMacroCalendarReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_macro_calendar(
            finnhub_api_key=finnhub_api_key,
            days_ahead=body.days,
            country_filter=body.country_filter,
            importance_min=body.importance_min,
            start=body.start,
            end=body.end,
        )
    @app.post("/tools/get_market_overview", tags=["reads"])
    async def t_get_market_overview(
        body: GetMarketOverviewReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_market_overview()
    @app.post("/tools/get_asset_price", tags=["reads"])
    async def t_get_asset_price(
        body: GetAssetPriceReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_asset_price(body.ticker)
    @app.post("/tools/get_treasury_yields", tags=["reads"])
    async def t_get_treasury_yields(
        body: GetTreasuryYieldsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_treasury_yields()
    @app.post("/tools/get_equity_futures", tags=["reads"])
    async def t_get_equity_futures(
        body: GetEquityFuturesReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_equity_futures()
    @app.post("/tools/get_yield_curve_slope", tags=["reads"])
    async def t_get_yield_curve_slope(
        body: GetYieldCurveSlopeReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_yield_curve_slope()
    @app.post("/tools/get_breakeven_inflation", tags=["reads"])
    async def t_get_breakeven_inflation(
        body: GetBreakevenInflationReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_breakeven_inflation(fred_api_key=fred_api_key)
    @app.post("/tools/get_cot_tff", tags=["reads"])
    async def t_get_cot_tff(
        body: GetCotTffReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cot_tff(body.symbol, body.lookback_weeks)
    @app.post("/tools/get_cot_disaggregated", tags=["reads"])
    async def t_get_cot_disaggregated(
        body: GetCotDisaggregatedReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cot_disaggregated(body.symbol, body.lookback_weeks)
    @app.post("/tools/get_cot_extreme_positioning", tags=["reads"])
    async def t_get_cot_extreme(
        body: GetCotExtremeReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cot_extreme_positioning(body.lookback_weeks)
    # ───── MCP endpoint (/mcp) — bridges MCP tool calls to the /tools/* routes ─────
    # The bridge loops back over HTTP to this same process, hence the PORT env read.
    port = int(os.environ.get("PORT", "9013"))
    mount_mcp_endpoint(
        app,
        name="cerbero-macro",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_economic_indicators", "description": "FRED economic indicators (Fed rate, CPI, ecc)."},
            {"name": "get_macro_calendar", "description": "Eventi macro con filtri country/importance/date range."},
            {"name": "get_market_overview", "description": "Snapshot overview mercato macro."},
            {"name": "get_asset_price", "description": "Prezzo cross-asset: WTI, DXY, SPX, VIX, yields, FX, ecc."},
            {"name": "get_treasury_yields", "description": "Curva US Treasury 2y/5y/10y/30y + shape detection."},
            {"name": "get_equity_futures", "description": "Futures ES/NQ/YM/RTY con session status."},
            {"name": "get_yield_curve_slope", "description": "Slope 2y10y/5y30y + butterfly + regime (steep/normal/flat/inverted)."},
            {"name": "get_breakeven_inflation", "description": "Breakeven inflation 5Y/10Y + 5y5y forward (FRED T5YIE/T10YIE/T5YIFR)."},
            {"name": "get_cot_tff", "description": "COT TFF report (CFTC) per equity/financial: ES/NQ/RTY/ZN/ZB/6E/6J/DX. Roles: dealer, asset manager, leveraged funds, other."},
            {"name": "get_cot_disaggregated", "description": "COT Disaggregated report (CFTC) per commodities: CL/GC/SI/HG/ZW/ZC/ZS. Roles: producer/merchant, swap dealer, managed money, other."},
            {"name": "get_cot_extreme_positioning", "description": "Scanner posizionamento estremo (percentile ≤5 o ≥95) sui simboli watchlist."},
        ],
    )
    return app
-117
View File
@@ -1,117 +0,0 @@
from __future__ import annotations
from mcp_macro.cot import (
classify_extreme,
compute_percentile,
parse_disagg_row,
parse_tff_row,
)
def test_compute_percentile_basic():
    """Values inside a 10..100 history map to their percentile rank."""
    history = list(range(10, 101, 10))
    for value, expected in ((50, 50.0), (10, 10.0), (100, 100.0)):
        assert compute_percentile(value, history) == expected
def test_compute_percentile_value_below_min():
    """A value below the historical minimum pins to the 0th percentile."""
    assert compute_percentile(5, [10, 20, 30]) == 0.0
def test_compute_percentile_value_above_max():
    """A value above the historical maximum pins to the 100th percentile."""
    assert compute_percentile(40, [10, 20, 30]) == 100.0
def test_compute_percentile_empty_history():
    """With no history the percentile is undefined (None)."""
    assert compute_percentile(50, []) is None
def test_classify_extreme_below_threshold():
    """Percentiles at or under 5 classify as extreme_short (boundary inclusive)."""
    for pct in (3.0, 5.0):
        assert classify_extreme(pct) == "extreme_short"
def test_classify_extreme_above_threshold():
    """Percentiles at or over 95 classify as extreme_long (boundary inclusive)."""
    for pct in (96.0, 95.0):
        assert classify_extreme(pct) == "extreme_long"
def test_classify_extreme_neutral():
    """Anything strictly between the 5 and 95 thresholds is neutral."""
    for pct in (50.0, 94.99, 5.01):
        assert classify_extreme(pct) == "neutral"
def test_classify_extreme_none_input():
    """A missing percentile (None) falls back to neutral."""
    assert classify_extreme(None) == "neutral"
# Real Socrata payload shape (subset of relevant fields; values are arbitrary test data)
# for the CFTC TFF dataset consumed by parse_tff_row.
TFF_SOCRATA_ROW = {
    "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
    "dealer_positions_long_all": "12345",
    "dealer_positions_short_all": "23456",
    "asset_mgr_positions_long": "654321",
    "asset_mgr_positions_short": "200000",
    "lev_money_positions_long": "100000",
    "lev_money_positions_short": "350000",
    "other_rept_positions_long": "50000",
    "other_rept_positions_short": "50000",
    "open_interest_all": "2500000",
}
# Same idea for the CFTC Disaggregated dataset consumed by parse_disagg_row.
DISAGG_SOCRATA_ROW = {
    "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
    "prod_merc_positions_long_all": "100000",
    "prod_merc_positions_short_all": "300000",
    "swap_positions_long_all": "50000",
    "swap_positions_short_all": "60000",
    "m_money_positions_long_all": "200000",
    "m_money_positions_short_all": "80000",
    "other_rept_positions_long_all": "10000",
    "other_rept_positions_short_all": "10000",
    "open_interest_all": "1500000",
}
def test_parse_tff_row_extracts_all_fields():
    """parse_tff_row maps every Socrata field and derives the net positions."""
    row = parse_tff_row(TFF_SOCRATA_ROW)
    expected = {
        "report_date": "2026-04-22",
        "dealer_long": 12345,
        "dealer_short": 23456,
        "dealer_net": 12345 - 23456,
        "asset_mgr_long": 654321,
        "asset_mgr_net": 654321 - 200000,
        "lev_funds_long": 100000,
        "lev_funds_short": 350000,
        "lev_funds_net": 100000 - 350000,
        "other_long": 50000,
        "other_net": 0,
        "open_interest": 2500000,
    }
    for field, value in expected.items():
        assert row[field] == value
def test_parse_tff_row_handles_missing_field():
    """Absent numeric fields default to 0 instead of raising."""
    row = parse_tff_row({"report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000"})
    assert row["report_date"] == "2026-04-22"
    assert row["dealer_long"] == 0
    assert row["dealer_net"] == 0
def test_parse_disagg_row_extracts_all_fields():
    """parse_disagg_row maps every Socrata field and derives the net positions."""
    row = parse_disagg_row(DISAGG_SOCRATA_ROW)
    expected = {
        "report_date": "2026-04-22",
        "producer_long": 100000,
        "producer_short": 300000,
        "producer_net": -200000,
        "swap_long": 50000,
        "swap_net": -10000,
        "managed_money_long": 200000,
        "managed_money_short": 80000,
        "managed_money_net": 120000,
        "other_long": 10000,
        "other_net": 0,
        "open_interest": 1500000,
    }
    for field, value in expected.items():
        assert row[field] == value
-402
View File
@@ -1,402 +0,0 @@
from __future__ import annotations
from datetime import UTC
import httpx
import pytest
import pytest_httpx
from mcp_macro.fetchers import (
fetch_breakeven_inflation,
fetch_economic_indicators,
fetch_macro_calendar,
fetch_market_overview,
yield_curve_metrics,
)
# --- fetch_economic_indicators ---
@pytest.mark.asyncio
async def test_economic_indicators_no_key():
    """Without a FRED key the fetcher reports an error instead of calling out."""
    out = await fetch_economic_indicators(fred_api_key="")
    assert out.get("error") == "No FRED API key configured"
@pytest.mark.asyncio
async def test_economic_indicators_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """All four FRED series mocked with the same value; each field echoes it."""
    for series_id in ("FEDFUNDS", "CPIAUCSL", "UNRATE", "DGS10"):
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "testkey",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": "5.25"}]},
        )
    result = await fetch_economic_indicators(fred_api_key="testkey")
    assert result["fed_rate"] == 5.25
    assert result["cpi"] == 5.25
    assert result["unemployment"] == 5.25
    assert result["us10y_yield"] == 5.25
    assert "updated_at" in result
@pytest.mark.asyncio
async def test_economic_indicators_filter(httpx_mock: pytest_httpx.HTTPXMock):
    """Requesting only fed_rate fetches only FEDFUNDS and omits other fields."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.stlouisfed.org/fred/series/observations",
            params={
                "series_id": "FEDFUNDS",
                "api_key": "k",
                "file_type": "json",
                "sort_order": "desc",
                "limit": "1",
            },
        ),
        json={"observations": [{"value": "5.33"}]},
    )
    result = await fetch_economic_indicators(fred_api_key="k", indicators=["fed_rate"])
    assert "fed_rate" in result
    assert "cpi" not in result
# --- fetch_macro_calendar ---
@pytest.mark.asyncio
async def test_macro_calendar_forex_factory_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary ForexFactory feed succeeds; a future event is returned."""
    from datetime import datetime, timedelta
    # Event must be in the future or the fetcher would filter it out.
    future = (datetime.now(UTC) + timedelta(days=1)).isoformat()
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        json=[
            {
                "date": future,
                "title": "CPI",
                "country": "US",
                "impact": "High",
                "forecast": "3.0%",
                "previous": "3.2%",
            }
        ],
    )
    result = await fetch_macro_calendar()
    assert "events" in result
    assert len(result["events"]) >= 1
    assert result["events"][0]["name"] == "CPI"
@pytest.mark.asyncio
async def test_macro_calendar_no_source(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary feed down and no Finnhub key -> empty events with a note."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )
    result = await fetch_macro_calendar(finnhub_api_key="")
    assert result == {"events": [], "note": "No calendar source available"}
@pytest.mark.asyncio
@pytest.mark.httpx_mock(assert_all_responses_were_requested=False, assert_all_requests_were_expected=False)
async def test_macro_calendar_finnhub_fallback(httpx_mock: pytest_httpx.HTTPXMock):
    """Primary feed down but a Finnhub key is set -> fall back to Finnhub."""
    httpx_mock.add_response(
        url="https://nfs.faireconomy.media/ff_calendar_thisweek.json",
        status_code=500,
    )
    # Catch-all callback: only finnhub.io URLs get a valid payload.
    def dispatch(request: httpx.Request) -> httpx.Response:
        if "finnhub.io" in str(request.url):
            return httpx.Response(
                200,
                json=[{"date": "2024-01-15", "event": "FOMC", "importance": "high", "forecast": "", "prev": ""}],
            )
        return httpx.Response(500)
    httpx_mock.add_callback(dispatch)
    result = await fetch_macro_calendar(finnhub_api_key="fkey")
    assert "events" in result
    assert result["events"][0]["name"] == "FOMC"
# --- fetch_market_overview ---
@pytest.mark.asyncio
async def test_market_overview_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """All upstreams (CoinGecko, Deribit DVOL, Yahoo) mocked -> full snapshot."""
    import re as _re  # single import; the original had a duplicate mid-function
    httpx_mock.add_response(
        url="https://api.coingecko.com/api/v3/global",
        json={
            "data": {
                "market_cap_percentage": {"btc": 52.3},
                "total_market_cap": {"usd": 2_000_000_000_000},
            }
        },
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.coingecko.com/api/v3/simple/price",
            params={"ids": "bitcoin,ethereum", "vs_currencies": "usd"},
        ),
        json={"bitcoin": {"usd": 65000}, "ethereum": {"usd": 3500}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=BTC.*"
        ),
        json={"result": {"data": [[1, 50, 52, 49, 51.5]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(
            r"https://www\.deribit\.com/api/v2/public/get_volatility_index_data\?currency=ETH.*"
        ),
        json={"result": {"data": [[1, 60, 62, 59, 61.2]], "continuation": None}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^GSPC.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 5830.12}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/GC[%=].*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 2412.5}}]}},
    )
    httpx_mock.add_response(
        url=_re.compile(r"https://query1\.finance\.yahoo\.com/v8/finance/chart/\^VIX.*"),
        json={"chart": {"result": [{"meta": {"regularMarketPrice": 18.3}}]}},
    )
    # Clear module cache to force fresh fetch
    from mcp_macro import fetchers as _f
    _f._MARKET_CACHE["data"] = None
    _f._MARKET_CACHE["ts"] = 0.0
    result = await fetch_market_overview()
    assert result["btc_dominance"] == 52.3
    assert result["btc_price"] == 65000
    assert result["eth_price"] == 3500
    assert result["total_market_cap"] == 2_000_000_000_000
    assert result["dvol_btc"] == 51.5
    assert result["dvol_eth"] == 61.2
    assert result["sp500"] == 5830.12
    assert result["gold"] == 2412.5
    assert result["vix"] == 18.3
    assert "data_timestamp" in result
# --- yield_curve_metrics ---
def test_yield_curve_metrics_normal_curve():
    """Upward-sloping curve: positive slopes, 'steep' regime, butterfly 0.2."""
    metrics = yield_curve_metrics({"us2y": 4.0, "us5y": 4.2, "us10y": 4.5, "us30y": 4.8})
    assert metrics["slope_2y10y"] == 0.5
    assert metrics["slope_5y30y"] == 0.6
    assert metrics["regime"] == "steep"
    assert metrics["butterfly_2_10_30"] == 0.2  # 2*4.5 - 4.0 - 4.8
def test_yield_curve_metrics_inverted():
    """Downward-sloping curve flags the 'inverted' regime."""
    metrics = yield_curve_metrics({"us2y": 5.5, "us5y": 5.0, "us10y": 4.5, "us30y": 4.3})
    assert metrics["slope_2y10y"] == -1.0
    assert metrics["regime"] == "inverted"
def test_yield_curve_metrics_partial_data():
    """Missing tenors yield None slopes and an 'unknown' regime."""
    metrics = yield_curve_metrics({"us10y": 4.5})
    assert metrics["slope_2y10y"] is None
    assert metrics["regime"] == "unknown"
# --- fetch_breakeven_inflation ---
@pytest.mark.asyncio
async def test_breakeven_no_key():
    """Without a FRED key the breakeven fetcher returns an error payload."""
    result = await fetch_breakeven_inflation(fred_api_key="")
    assert "error" in result
@pytest.mark.asyncio
async def test_breakeven_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """Three FRED breakeven series mocked; values near 2% read as 'anchored'."""
    for series_id, val in [("T5YIE", "2.3"), ("T10YIE", "2.5"), ("T5YIFR", "2.7")]:
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "k",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": val}]},
        )
    out = await fetch_breakeven_inflation(fred_api_key="k")
    assert out["breakevens"]["be_5y"] == 2.3
    assert out["breakevens"]["be_10y"] == 2.5
    assert out["breakevens"]["be_5y5y_forward"] == 2.7
    assert out["interpretation"] == "anchored"
@pytest.mark.asyncio
async def test_breakeven_high_inflation(httpx_mock: pytest_httpx.HTTPXMock):
    """All breakevens at 3.5% read as 'high_inflation_expected'."""
    for series_id in ("T5YIE", "T10YIE", "T5YIFR"):
        httpx_mock.add_response(
            url=httpx.URL(
                "https://api.stlouisfed.org/fred/series/observations",
                params={
                    "series_id": series_id,
                    "api_key": "k",
                    "file_type": "json",
                    "sort_order": "desc",
                    "limit": "1",
                },
            ),
            json={"observations": [{"value": "3.5"}]},
        )
    out = await fetch_breakeven_inflation(fred_api_key="k")
    assert out["interpretation"] == "high_inflation_expected"
@pytest.mark.asyncio
async def test_fetch_cot_tff_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """CFTC returns rows DESC; the fetcher re-sorts ASC and derives nets."""
    from mcp_macro.fetchers import fetch_cot_tff
    httpx_mock.add_response(
        url=httpx.URL(
            "https://publicreporting.cftc.gov/resource/gpe5-46if.json",
            params={
                "cftc_contract_market_code": "13874A",
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": "52",
            },
        ),
        json=[
            {
                "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
                "dealer_positions_long_all": "12345",
                "dealer_positions_short_all": "23456",
                "asset_mgr_positions_long": "654321",
                "asset_mgr_positions_short": "200000",
                "lev_money_positions_long": "100000",
                "lev_money_positions_short": "350000",
                "other_rept_positions_long": "50000",
                "other_rept_positions_short": "50000",
                "open_interest_all": "2500000",
            },
            {
                "report_date_as_yyyy_mm_dd": "2026-04-15T00:00:00.000",
                "dealer_positions_long_all": "11000",
                "dealer_positions_short_all": "22000",
                "asset_mgr_positions_long": "640000",
                "asset_mgr_positions_short": "210000",
                "lev_money_positions_long": "110000",
                "lev_money_positions_short": "320000",
                "other_rept_positions_long": "48000",
                "other_rept_positions_short": "52000",
                "open_interest_all": "2480000",
            },
        ],
    )
    out = await fetch_cot_tff("ES", lookback_weeks=52)
    assert out["symbol"] == "ES"
    assert out["report_type"] == "tff"
    assert len(out["rows"]) == 2
    # Ordering ASC by date (oldest first)
    assert out["rows"][0]["report_date"] == "2026-04-15"
    assert out["rows"][1]["report_date"] == "2026-04-22"
    assert out["rows"][1]["lev_funds_net"] == -250000
    assert "data_timestamp" in out
@pytest.mark.asyncio
async def test_fetch_cot_tff_unknown_symbol():
    """Unknown symbols short-circuit with an error plus the valid symbol list."""
    from mcp_macro.fetchers import fetch_cot_tff
    result = await fetch_cot_tff("INVALID", lookback_weeks=52)
    assert result.get("error") == "unknown_symbol"
    assert "ES" in result.get("available", [])
@pytest.mark.asyncio
async def test_fetch_cot_disagg_happy_path(httpx_mock: pytest_httpx.HTTPXMock):
    """Single CFTC Disaggregated row is parsed with correct derived nets."""
    from mcp_macro.fetchers import fetch_cot_disaggregated
    httpx_mock.add_response(
        url=httpx.URL(
            "https://publicreporting.cftc.gov/resource/72hh-3qpy.json",
            params={
                "cftc_contract_market_code": "067651",
                "$order": "report_date_as_yyyy_mm_dd DESC",
                "$limit": "52",
            },
        ),
        json=[
            {
                "report_date_as_yyyy_mm_dd": "2026-04-22T00:00:00.000",
                "prod_merc_positions_long_all": "100000",
                "prod_merc_positions_short_all": "300000",
                "swap_positions_long_all": "50000",
                "swap_positions_short_all": "60000",
                "m_money_positions_long_all": "200000",
                "m_money_positions_short_all": "80000",
                "other_rept_positions_long_all": "10000",
                "other_rept_positions_short_all": "10000",
                "open_interest_all": "1500000",
            },
        ],
    )
    out = await fetch_cot_disaggregated("CL", lookback_weeks=52)
    assert out["symbol"] == "CL"
    assert out["report_type"] == "disaggregated"
    assert len(out["rows"]) == 1
    assert out["rows"][0]["managed_money_net"] == 120000
    assert out["rows"][0]["producer_net"] == -200000
@pytest.mark.asyncio
async def test_fetch_cot_disagg_unknown_symbol():
    """Unknown symbols short-circuit with an error plus the valid symbol list."""
    from mcp_macro.fetchers import fetch_cot_disaggregated
    result = await fetch_cot_disaggregated("XYZ", lookback_weeks=52)
    assert result.get("error") == "unknown_symbol"
    assert "CL" in result.get("available", [])
@pytest.mark.asyncio
async def test_fetch_cot_extreme_positioning_flags_outliers(monkeypatch):
    """Mock fetch_cot_tff and fetch_cot_disagg to simulate history plus a last point.

    ES's last lev_funds_net is a low outlier (extreme_short); CL's last
    managed_money_net is a high outlier (extreme_long).
    """
    from unittest.mock import AsyncMock
    from mcp_macro import fetchers as f
    # ES series whose last lev_funds_net is far below history -> extreme_short.
    es_rows = [
        {"report_date": f"2026-{m:02d}-01", "lev_funds_net": v}
        for m, v in [(1, 0), (2, 50), (3, 100), (4, -500)]
    ]
    cl_rows = [
        {"report_date": f"2026-{m:02d}-01", "managed_money_net": v}
        for m, v in [(1, 100), (2, 200), (3, 300), (4, 1000)]
    ]
    async def fake_tff(symbol, lookback_weeks):
        if symbol == "ES":
            return {"symbol": "ES", "report_type": "tff", "rows": es_rows}
        return {"symbol": symbol, "report_type": "tff", "rows": []}
    async def fake_disagg(symbol, lookback_weeks):
        if symbol == "CL":
            return {"symbol": "CL", "report_type": "disaggregated", "rows": cl_rows}
        return {"symbol": symbol, "report_type": "disaggregated", "rows": []}
    # Patch on the module so the scanner's global-name lookups hit the fakes.
    monkeypatch.setattr(f, "fetch_cot_tff", AsyncMock(side_effect=fake_tff))
    monkeypatch.setattr(f, "fetch_cot_disaggregated", AsyncMock(side_effect=fake_disagg))
    out = await f.fetch_cot_extreme_positioning(lookback_weeks=4)
    assert "extremes" in out
    by_sym = {e["symbol"]: e for e in out["extremes"]}
    assert by_sym["ES"]["signal"] == "extreme_short"
    assert by_sym["ES"]["key_role"] == "lev_funds"
    assert by_sym["CL"]["signal"] == "extreme_long"
    assert by_sym["CL"]["key_role"] == "managed_money"
-202
View File
@@ -1,202 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, patch
import pytest
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_macro.server import create_app
@pytest.fixture
def http():
    """TestClient over a fresh app with a core ('ct') and an observer ('ot') token."""
    store = TokenStore(
        tokens={
            "ct": Principal("core", {"core"}),
            "ot": Principal("observer", {"observer"}),
        }
    )
    app = create_app(fred_api_key="testfred", finnhub_api_key="testfinn", token_store=store)
    return TestClient(app)
# --- Health ---
def test_health(http):
    """The liveness endpoint answers 200 without any auth."""
    response = http.get("/health")
    assert response.status_code == 200
# --- get_economic_indicators ---
def test_get_economic_indicators_core_ok(http):
    """A core token may call the endpoint; fetcher is patched out."""
    with patch(
        "mcp_macro.server.fetch_economic_indicators",
        new=AsyncMock(return_value={"fed_rate": 5.25, "updated_at": "2024-01-01T00:00:00+00:00"}),
    ):
        r = http.post(
            "/tools/get_economic_indicators",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert r.status_code == 200
        assert r.json()["fed_rate"] == 5.25
def test_get_economic_indicators_observer_ok(http):
    """An observer token may also call this read-only endpoint."""
    with patch(
        "mcp_macro.server.fetch_economic_indicators",
        new=AsyncMock(return_value={"fed_rate": 5.25}),
    ):
        r = http.post(
            "/tools/get_economic_indicators",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
def test_get_economic_indicators_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_economic_indicators", json={})
    assert response.status_code == 401
# --- get_macro_calendar ---
def test_get_macro_calendar_core_ok(http):
    """A core token may call the calendar endpoint; fetcher is patched out."""
    with patch(
        "mcp_macro.server.fetch_macro_calendar",
        new=AsyncMock(return_value={"events": []}),
    ):
        r = http.post(
            "/tools/get_macro_calendar",
            headers={"Authorization": "Bearer ct"},
            json={"days": 7},
        )
        assert r.status_code == 200
def test_get_macro_calendar_observer_ok(http):
    """An observer token may call the calendar endpoint with the default body."""
    with patch(
        "mcp_macro.server.fetch_macro_calendar",
        new=AsyncMock(return_value={"events": []}),
    ):
        r = http.post(
            "/tools/get_macro_calendar",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
def test_get_macro_calendar_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_macro_calendar", json={})
    assert response.status_code == 401
# --- get_market_overview ---
def test_get_market_overview_core_ok(http):
    """A core token may call the overview endpoint; fetcher is patched out."""
    with patch(
        "mcp_macro.server.fetch_market_overview",
        new=AsyncMock(return_value={"btc_dominance": 52.0, "btc_price": 65000}),
    ):
        r = http.post(
            "/tools/get_market_overview",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert r.status_code == 200
        assert r.json()["btc_price"] == 65000
def test_get_market_overview_observer_ok(http):
    """An observer token may call the overview endpoint."""
    with patch(
        "mcp_macro.server.fetch_market_overview",
        new=AsyncMock(return_value={"btc_dominance": 52.0}),
    ):
        r = http.post(
            "/tools/get_market_overview",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
def test_get_market_overview_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_market_overview", json={})
    assert response.status_code == 401
def test_get_cot_tff_core_ok(http):
    """A core token may call the COT TFF endpoint; fetcher is patched out."""
    with patch(
        "mcp_macro.server.fetch_cot_tff",
        new=AsyncMock(return_value={"symbol": "ES", "rows": []}),
    ):
        r = http.post(
            "/tools/get_cot_tff",
            headers={"Authorization": "Bearer ct"},
            json={"symbol": "ES"},
        )
        assert r.status_code == 200
        assert r.json()["symbol"] == "ES"
def test_get_cot_tff_observer_ok(http):
    """An observer token may call the COT TFF endpoint."""
    with patch(
        "mcp_macro.server.fetch_cot_tff",
        new=AsyncMock(return_value={"symbol": "ES", "rows": []}),
    ):
        r = http.post(
            "/tools/get_cot_tff",
            headers={"Authorization": "Bearer ot"},
            json={"symbol": "ES"},
        )
        assert r.status_code == 200
def test_get_cot_tff_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_cot_tff", json={"symbol": "ES"})
    assert response.status_code == 401
def test_get_cot_disagg_observer_ok(http):
    """An observer token may call the COT Disaggregated endpoint."""
    with patch(
        "mcp_macro.server.fetch_cot_disaggregated",
        new=AsyncMock(return_value={"symbol": "CL", "rows": []}),
    ):
        r = http.post(
            "/tools/get_cot_disaggregated",
            headers={"Authorization": "Bearer ot"},
            json={"symbol": "CL"},
        )
        assert r.status_code == 200
def test_get_cot_disagg_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_cot_disaggregated", json={"symbol": "CL"})
    assert response.status_code == 401
def test_get_cot_extreme_positioning_ok(http):
    """The extreme-positioning scanner works with an empty default body."""
    with patch(
        "mcp_macro.server.fetch_cot_extreme_positioning",
        new=AsyncMock(return_value={"extremes": []}),
    ):
        r = http.post(
            "/tools/get_cot_extreme_positioning",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert r.status_code == 200
def test_get_cot_extreme_positioning_lookback_too_short(http):
    """Pydantic field bound (ge=4): lookback_weeks below 4 fails with 422."""
    response = http.post(
        "/tools/get_cot_extreme_positioning",
        headers={"Authorization": "Bearer ct"},
        json={"lookback_weeks": 2},
    )
    assert response.status_code == 422
-27
View File
@@ -1,27 +0,0 @@
[project]
name = "mcp-sentiment"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = [
    "mcp-common",
    "fastapi>=0.115",
    "uvicorn[standard]>=0.30",
    "httpx>=0.27",
    "pydantic>=2.6",
]
[project.optional-dependencies]
# Test-only extras: install with `uv sync --extra dev` (or pip `.[dev]`).
dev = ["pytest>=8", "pytest-asyncio>=0.23", "pytest-httpx>=0.30"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
[tool.hatch.build.targets.wheel]
packages = ["src/mcp_sentiment"]
[tool.uv.sources]
# mcp-common is resolved from the uv workspace, not from a package index.
mcp-common = { workspace = true }
[project.scripts]
# Console entrypoint used by the service container's CMD.
mcp-sentiment = "mcp_sentiment.__main__:main"
@@ -1,46 +0,0 @@
from __future__ import annotations
import json
import os
import uvicorn
from mcp_common.auth import load_token_store_from_files
from mcp_common.logging import configure_root_logging
from mcp_sentiment.server import create_app
def _load_cryptopanic_key() -> str:
"""CER-002: preferisci file secret, fallback a env CRYPTOPANIC_API_KEY."""
creds_file = os.environ.get("SENTIMENT_CREDENTIALS_FILE")
if creds_file and os.path.exists(creds_file):
try:
with open(creds_file) as f:
creds = json.load(f)
key = (creds.get("cryptopanic_key") or "").strip()
if key and key.lower() not in ("placeholder", "changeme", "none"):
return key
except (OSError, json.JSONDecodeError):
pass
return (os.environ.get("CRYPTOPANIC_API_KEY") or "").strip()
configure_root_logging() # CER-P5-009
def main():
    """Console-script entry point: load secrets/tokens, build the app, serve it."""
    key = _load_cryptopanic_key()
    token_store = load_token_store_from_files(
        core_token_file=os.environ.get("CORE_TOKEN_FILE"),
        observer_token_file=os.environ.get("OBSERVER_TOKEN_FILE"),
    )
    app = create_app(cryptopanic_key=key, token_store=token_store)
    uvicorn.run(
        app,
        log_config=None,  # CER-P5-009: delegate to the root JSON logger
        host=os.environ.get("HOST", "0.0.0.0"),
        port=int(os.environ.get("PORT", "9014")),
    )
if __name__ == "__main__":
    main()
@@ -1,656 +0,0 @@
from __future__ import annotations
import os
import re
import xml.etree.ElementTree as ET
from typing import Any
from mcp_common.http import async_client
# Upstream endpoints used by the fetchers below (all public HTTP APIs).
CRYPTOPANIC_URL = "https://cryptopanic.com/api/v1/posts/"
ALTERNATIVE_ME_URL = "https://api.alternative.me/fng/"  # Fear & Greed index
COINDESK_RSS = "https://www.coindesk.com/arc/outboundfeeds/rss/"
LUNARCRUSH_COIN_URL = "https://lunarcrush.com/api4/public/coins/{symbol}/v1"  # {symbol} filled per request
CRYPTOCOMPARE_NEWS_URL = "https://min-api.cryptocompare.com/data/v2/news/"
MESSARI_NEWS_URL = "https://data.messari.io/api/v1/news"
async def _fetch_coindesk_headlines(limit: int = 20) -> list[dict[str, Any]]:
    """Parse the CoinDesk RSS feed into normalized headline dicts (best-effort)."""
    headlines: list[dict[str, Any]] = []
    try:
        async with async_client(timeout=10.0, follow_redirects=True) as client:
            resp = await client.get(COINDESK_RSS)
            if resp.status_code != 200:
                return headlines
            feed = ET.fromstring(resp.text)
            for entry in feed.findall(".//item")[:limit]:
                headlines.append(
                    {
                        "title": entry.findtext("title", ""),
                        "source": "CoinDesk",
                        "published_at": entry.findtext("pubDate", ""),
                        "url": entry.findtext("link", ""),
                    }
                )
    except Exception:
        # Best-effort source: the aggregator treats [] as a provider failure.
        pass
    return headlines
# (source label, RSS URL) pairs polled by fetch_world_news.
WORLD_NEWS_FEEDS = [
    ("Reuters Business", "https://feeds.reuters.com/reuters/businessNews"),
    ("CNBC Top News", "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114"),
    ("Bloomberg Markets", "https://feeds.bloomberg.com/markets/news.rss"),
    ("CoinDesk", "https://www.coindesk.com/arc/outboundfeeds/rss/"),
]
# Public funding rate endpoints (no auth required)
BINANCE_FUNDING_URL = "https://fapi.binance.com/fapi/v1/premiumIndex"
BYBIT_FUNDING_URL = "https://api.bybit.com/v5/market/tickers"
OKX_FUNDING_URL = "https://www.okx.com/api/v5/public/funding-rate"
BINANCE_OI_HIST_URL = "https://fapi.binance.com/futures/data/openInterestHist"
async def _fetch_cryptocompare_news(limit: int = 20) -> list[dict[str, Any]]:
    """CER-017: CryptoCompare free news feed (no key needed).

    Returns up to `limit` normalized headline dicts; any failure yields [].
    """
    import datetime as _dt  # hoisted: the original re-imported this per item inside the loop

    items: list[dict[str, Any]] = []
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get(CRYPTOCOMPARE_NEWS_URL, params={"lang": "EN"})
            if resp.status_code != 200:
                return items
            data = resp.json()
            for r in (data.get("Data") or [])[:limit]:
                ts = r.get("published_on")
                try:
                    # Epoch seconds → ISO-8601 UTC string; malformed/missing → "".
                    pub = _dt.datetime.fromtimestamp(int(ts), _dt.UTC).isoformat() if ts else ""
                except (TypeError, ValueError):
                    pub = ""
                items.append({
                    "title": r.get("title", ""),
                    "source": r.get("source", "CryptoCompare"),
                    "published_at": pub,
                    "url": r.get("url", ""),
                    "provider": "cryptocompare",
                })
    except Exception:
        # Best-effort source: the aggregator treats [] as a provider failure.
        pass
    return items
async def _fetch_messari_news(limit: int = 20) -> list[dict[str, Any]]:
    """CER-017: Messari free news feed (no key needed for the basic feed)."""
    headlines: list[dict[str, Any]] = []
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get(MESSARI_NEWS_URL)
            if resp.status_code != 200:
                return headlines
            payload = resp.json()
            for row in (payload.get("data") or [])[:limit]:
                author = row.get("author") or {}
                headlines.append({
                    "title": row.get("title", ""),
                    "source": author.get("name") or "Messari",
                    "published_at": row.get("published_at", ""),
                    "url": row.get("url", ""),
                    "provider": "messari",
                })
    except Exception:
        # Best-effort source: the aggregator treats [] as a provider failure.
        pass
    return headlines
def _normalize_title(t: str) -> str:
    """Normalize a headline for cross-provider dedup: lowercase, keep only
    alphanumerics and whitespace, then trim."""
    lowered = t.lower()
    kept = [ch for ch in lowered if ch.isalnum() or ch.isspace()]
    return "".join(kept).strip()
async def fetch_crypto_news(api_key: str = "", limit: int = 20) -> dict[str, Any]:
    """CER-017: multi-source aggregator (CoinDesk + CryptoCompare + Messari) + dedup.

    If a Cryptopanic `api_key` is present (and not a placeholder value) it is
    queried as a 4th source. Returns deduped headlines sorted by published_at
    DESC plus per-provider success/failure bookkeeping.
    """
    import asyncio
    # CoinDesk + CryptoCompare + Messari always run (free, no key required).
    tasks = [
        _fetch_coindesk_headlines(limit),
        _fetch_cryptocompare_news(limit),
        _fetch_messari_news(limit),
    ]
    include_cp = bool(api_key) and api_key.lower() not in ("placeholder", "none", "changeme")
    if include_cp:
        tasks.append(_fetch_cryptopanic_news(api_key, limit))
    # return_exceptions=True: one failing provider must not sink the others.
    results = await asyncio.gather(*tasks, return_exceptions=True)
    all_items: list[dict[str, Any]] = []
    providers_ok: list[str] = []
    providers_failed: list[str] = []
    provider_names = ["coindesk", "cryptocompare", "messari"]
    if include_cp:
        provider_names.append("cryptopanic")
    for name, res in zip(provider_names, results, strict=True):
        # An exception OR an empty list both count as provider failure.
        if isinstance(res, Exception) or not res:
            providers_failed.append(name)
            continue
        providers_ok.append(name)
        for item in res:
            if "provider" not in item:
                item["provider"] = name
            all_items.append(item)
    # Dedup by normalized title — the first occurrence wins.
    seen: set[str] = set()
    deduped: list[dict[str, Any]] = []
    for h in all_items:
        key = _normalize_title(h.get("title", ""))
        if not key or key in seen:
            continue
        seen.add(key)
        deduped.append(h)
    # Sort by published_at DESC (ISO strings compare lexicographically; empty ones sink last).
    deduped.sort(key=lambda x: x.get("published_at") or "", reverse=True)
    return {
        "headlines": deduped[:limit],
        "sources": providers_ok,
        "sources_failed": providers_failed,
        "total_before_dedup": len(all_items),
        "total_after_dedup": len(deduped),
    }
async def _fetch_cryptopanic_news(api_key: str, limit: int) -> list[dict[str, Any]]:
    """Cryptopanic as one of the aggregated sources; any failure maps to []."""
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get(
                CRYPTOPANIC_URL,
                params={"auth_token": api_key, "public": "true"},
            )
            if resp.status_code >= 400:
                return []
            data = resp.json()
    except Exception:
        return []
    normalized: list[dict[str, Any]] = []
    for post in (data.get("results") or [])[:limit]:
        source_title = (post.get("source") or {}).get("title", "Cryptopanic")
        normalized.append({
            "title": post.get("title", ""),
            "source": source_title,
            "published_at": post.get("published_at", ""),
            "url": post.get("url", ""),
            "provider": "cryptopanic",
        })
    return normalized
async def _fetch_lunarcrush(symbol: str, api_key: str) -> dict | None:
    """CER-P2-005: LunarCrush v4 social metrics; None on any failure."""
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get(
                LUNARCRUSH_COIN_URL.format(symbol=symbol.upper()),
                headers={"Authorization": f"Bearer {api_key}"},
            )
            if resp.status_code != 200:
                return None
            payload = (resp.json() or {}).get("data") or {}
            wanted = (
                "galaxy_score",
                "alt_rank",
                "sentiment",  # 0-100 scale
                "social_volume_24h",
                "social_dominance",
            )
            return {field: payload.get(field) for field in wanted}
    except Exception:
        return None
def _fng_to_sentiment(value: int) -> float:
    """Map a Fear & Greed reading (0-100) onto a [-1, 1] sentiment proxy."""
    centered = value - 50
    return round(centered / 50.0, 3)
async def fetch_social_sentiment(symbol: str = "BTC") -> dict[str, Any]:
    """CER-P2-005: provider chain — LunarCrush plus a fear&greed proxy.

    If the LUNARCRUSH_API_KEY env var is set and the API responds, real social
    metrics are used. Otherwise twitter/reddit sentiment is derived from the
    fear&greed index (flagged derived=True so the agent knows it is a proxy).
    """
    async with async_client(timeout=10.0) as client:
        fng_resp = await client.get(ALTERNATIVE_ME_URL, params={"limit": 1})
        fng_data = fng_resp.json()
    fng_list = fng_data.get("data", [])
    fng = fng_list[0] if fng_list else {}
    # NOTE(review): int() raises ValueError on a non-numeric "value" — confirm upstream always sends digits.
    fng_value = int(fng.get("value", 0))
    proxy = _fng_to_sentiment(fng_value) if fng_value else 0.0  # a value of 0 is treated as "missing"
    result: dict[str, Any] = {
        "fear_greed_index": fng_value,
        "fear_greed_label": fng.get("value_classification", ""),
        "symbol": symbol.upper(),
        "social_volume": 0,
        "twitter_sentiment": 0.0,
        "reddit_sentiment": 0.0,
        "source": "fear_greed_only",
        "derived": True,
    }
    lc_key = os.environ.get("LUNARCRUSH_API_KEY", "").strip()
    if lc_key:
        lc = await _fetch_lunarcrush(symbol, lc_key)
        if lc is not None:
            # LunarCrush sentiment 0-100 → normalize to [-1, 1]
            lc_sent = lc.get("sentiment")
            norm = round((float(lc_sent) - 50) / 50.0, 3) if lc_sent is not None else None
            result.update({
                "twitter_sentiment": norm if norm is not None else proxy,
                "reddit_sentiment": norm if norm is not None else proxy,
                "social_volume": int(lc.get("social_volume_24h") or 0),
                "galaxy_score": lc.get("galaxy_score"),
                "alt_rank": lc.get("alt_rank"),
                "social_dominance": lc.get("social_dominance"),
                "source": "lunarcrush+fear_greed",
                "derived": False,
            })
            return result
    # Proxy-only path (no key, or LunarCrush failed).
    result["twitter_sentiment"] = proxy
    result["reddit_sentiment"] = proxy
    result["note"] = (
        "twitter/reddit derived from fear_greed_index; configure LUNARCRUSH_API_KEY "
        "for real social metrics"
    )
    return result
async def fetch_funding_rates(asset: str = "BTC") -> dict[str, Any]:
    """Fetch perpetual funding rates from Binance, Bybit and OKX public APIs.

    Each venue is queried best-effort: a venue that fails is simply omitted
    from the resulting `rates` list.
    """
    asset = asset.upper()
    usdt_symbol = f"{asset}USDT"
    okx_inst = f"{asset}-USDT-SWAP"
    rates: list[dict[str, Any]] = []

    def _row(exchange: str, rate: float, next_time: Any) -> dict[str, Any]:
        # Normalized per-venue entry; field spellings differ across venues.
        return {
            "exchange": exchange,
            "asset": asset,
            "rate": rate,
            "next_funding_time": next_time,
        }

    async with async_client(timeout=10.0) as client:
        # Binance
        try:
            r = await client.get(BINANCE_FUNDING_URL, params={"symbol": usdt_symbol})
            if r.status_code == 200:
                payload = r.json()
                rates.append(_row(
                    "binance",
                    float(payload.get("lastFundingRate", 0)),
                    payload.get("nextFundingTime", ""),
                ))
        except Exception:
            pass
        # Bybit
        try:
            r = await client.get(
                BYBIT_FUNDING_URL,
                params={"category": "linear", "symbol": usdt_symbol},
            )
            if r.status_code == 200:
                listing = r.json().get("result", {}).get("list", [])
                if listing:
                    payload = listing[0]
                    rates.append(_row(
                        "bybit",
                        float(payload.get("fundingRate", 0)),
                        payload.get("nextFundingTime", ""),
                    ))
        except Exception:
            pass
        # OKX
        try:
            r = await client.get(OKX_FUNDING_URL, params={"instId": okx_inst})
            if r.status_code == 200:
                listing = r.json().get("data", [])
                if listing:
                    payload = listing[0]
                    rates.append(_row(
                        "okx",
                        float(payload.get("fundingRate", 0)),
                        payload.get("nextFundingTime", ""),
                    ))
        except Exception:
            pass
    return {"asset": asset, "rates": rates}
async def fetch_cross_exchange_funding(assets: list[str] | None = None) -> dict[str, Any]:
    """Multi-asset funding-rate snapshot with spread and arbitrage detection.

    Queries Binance, Bybit, OKX and Hyperliquid best-effort (a failing venue
    stays None) and reports per-asset spreads plus funding-differential
    arbitrage candidates.
    """
    from datetime import UTC
    from datetime import datetime as _dt
    assets = [a.upper() for a in (assets or ["BTC", "ETH", "SOL"])]
    snapshot: dict[str, dict[str, Any]] = {}
    async with async_client(timeout=10.0) as client:
        for asset in assets:
            rates: dict[str, float | None] = {
                "binance": None,
                "bybit": None,
                "okx": None,
                "hyperliquid": None,
            }
            try:
                resp = await client.get(
                    BINANCE_FUNDING_URL, params={"symbol": f"{asset}USDT"}
                )
                if resp.status_code == 200:
                    rates["binance"] = float(resp.json().get("lastFundingRate", 0))
            except Exception:
                pass
            try:
                resp = await client.get(
                    BYBIT_FUNDING_URL,
                    params={"category": "linear", "symbol": f"{asset}USDT"},
                )
                if resp.status_code == 200:
                    items = resp.json().get("result", {}).get("list", [])
                    if items:
                        rates["bybit"] = float(items[0].get("fundingRate", 0))
            except Exception:
                pass
            try:
                resp = await client.get(
                    OKX_FUNDING_URL, params={"instId": f"{asset}-USDT-SWAP"}
                )
                if resp.status_code == 200:
                    items = resp.json().get("data", [])
                    if items:
                        rates["okx"] = float(items[0].get("fundingRate", 0))
            except Exception:
                pass
            try:
                # Hyperliquid bundles metadata and per-asset contexts in one POST.
                resp = await client.post(
                    "https://api.hyperliquid.xyz/info",
                    json={"type": "metaAndAssetCtxs"},
                )
                if resp.status_code == 200:
                    data = resp.json()
                    universe = data[0].get("universe") or []
                    ctx_list = data[1] if len(data) > 1 else []
                    for meta, ctx in zip(universe, ctx_list, strict=False):
                        if meta.get("name", "").upper() == asset:
                            rates["hyperliquid"] = float(ctx.get("funding", 0))
                            break
            except Exception:
                pass
            present = [v for v in rates.values() if v is not None]
            spread_max_min = max(present) - min(present) if present else None
            anomaly = None
            if present and spread_max_min is not None:
                mean_r = sum(present) / len(present)
                for name, v in rates.items():
                    if v is None:
                        continue
                    # NOTE(review): |v - mean| can never exceed spread_max_min
                    # (the threshold reduces to 2 * spread/2 = spread), so this
                    # branch appears dead and `anomaly` always stays None —
                    # confirm the intended outlier threshold.
                    if abs(v - mean_r) > 2 * (spread_max_min / 2 or 1e-9):
                        anomaly = f"{name}_outlier"
                        break
            snapshot[asset] = {
                **rates,
                "spread_max_min": spread_max_min,
                "anomaly": anomaly,
            }
    # Arbitrage opportunities
    arbs = []
    for asset, data in snapshot.items():
        values = [(k, v) for k, v in data.items() if k in ("binance", "bybit", "okx", "hyperliquid") and v is not None]
        if len(values) < 2:
            continue
        values.sort(key=lambda x: x[1])
        low_ex, low_v = values[0]
        high_ex, high_v = values[-1]
        diff = high_v - low_v
        # NOTE(review): annualizes as hourly funding (24x/day), while
        # fetch_funding_arb_spread assumes an 8h cycle (3x/day) — the two
        # disagree; confirm which convention is intended.
        ann_pct = diff * 24 * 365 * 100  # hourly funding → annual pct
        if ann_pct > 50:
            arbs.append({
                "asset": asset,
                "pair": f"long_{low_ex}_short_{high_ex}",
                "funding_differential_ann": round(ann_pct, 2),
                "risk_adjusted": "acceptable" if ann_pct > 100 else "marginal",
            })
    return {
        "assets": assets,
        "snapshot": snapshot,
        "arbitrage_opportunities": arbs,
        "data_timestamp": _dt.now(UTC).isoformat(),
    }
async def fetch_funding_arb_spread(assets: list[str] | None = None) -> dict[str, Any]:
    """Summarize cross-exchange funding arbitrage opportunities compactly.

    Thin wrapper over fetch_cross_exchange_funding: for each asset, report the
    min/max venue rates, the spread, and the annualized percentage, sorted by
    annualized_pct DESC.
    """
    base = await fetch_cross_exchange_funding(assets)
    snapshot = base.get("snapshot") or {}
    venues = ("binance", "bybit", "okx", "hyperliquid")
    rows: list[dict[str, Any]] = []
    for asset, data in snapshot.items():
        quoted = {v: data[v] for v in venues if data.get(v) is not None}
        if len(quoted) < 2:
            continue
        ranked = sorted(quoted.items(), key=lambda kv: kv[1])
        long_venue, long_rate = ranked[0]
        short_venue, short_rate = ranked[-1]
        spread = short_rate - long_rate
        # Funding cycle: 8h on most venues, 1h on Hyperliquid → assume 8h => 3x/day.
        ann_pct = spread * 3 * 365 * 100
        rows.append({
            "asset": asset,
            "long_venue": long_venue,
            "short_venue": short_venue,
            "long_funding": long_rate,
            "short_funding": short_rate,
            "spread": spread,
            "annualized_pct": round(ann_pct, 2),
            "actionable": ann_pct > 50,
        })
    rows.sort(key=lambda r: -r["annualized_pct"])
    return {
        "opportunities": rows,
        "data_timestamp": base.get("data_timestamp"),
    }
async def fetch_liquidation_heatmap(asset: str = "BTC") -> dict[str, Any]:
    """Heuristic liquidation pressure: combines OI delta + funding extremes.

    Does NOT use a paid liquidation feed (Coinglass): it estimates where
    leveraged exposure close to liquidation is concentrated.

    long_squeeze_risk: high if OI grows while funding is positive (longs crowded).
    short_squeeze_risk: high if OI grows while funding is negative (shorts crowded).
    """
    asset = asset.upper()
    oi = await fetch_oi_history(asset=asset, period="5m", limit=288)  # 24h at 5-min resolution
    funding = await fetch_cross_exchange_funding(assets=[asset])
    snap = (funding.get("snapshot") or {}).get(asset) or {}
    rates = [v for k, v in snap.items() if k in ("binance", "bybit", "okx", "hyperliquid") and v is not None]
    avg_funding = sum(rates) / len(rates) if rates else None
    delta_4h = oi.get("delta_pct_4h")
    delta_24h = oi.get("delta_pct_24h")
    long_risk = "low"
    short_risk = "low"
    if avg_funding is not None and delta_24h is not None:
        # Threshold constants look like per-interval funding fractions and % OI growth — confirm calibration.
        if avg_funding > 0.0001 and delta_24h > 5:
            long_risk = "high"
        elif avg_funding > 0.00005 and delta_24h > 2:
            long_risk = "medium"
        if avg_funding < -0.0001 and delta_24h > 5:
            short_risk = "high"
        elif avg_funding < -0.00005 and delta_24h > 2:
            short_risk = "medium"
    return {
        "asset": asset,
        "avg_funding_rate": avg_funding,
        "oi_delta_pct_4h": delta_4h,
        "oi_delta_pct_24h": delta_24h,
        "long_squeeze_risk": long_risk,
        "short_squeeze_risk": short_risk,
        "note": "heuristic — non sostituisce feed liq dedicati (Coinglass).",
    }
async def fetch_cointegration_pairs(
    pairs: list[list[str]] | None = None,
    lookback_hours: int = 24,
) -> dict[str, Any]:
    """Engle-Granger cointegration test over Binance hourly closes.

    pairs: list of [base, quote] symbol pairs (e.g. [["BTC", "ETH"]]);
    defaults to the top-3 combinations. Results are sorted by adf_t_stat
    ascending.
    """
    from mcp_common.stats import cointegration_test
    pairs = pairs or [["BTC", "ETH"], ["BTC", "SOL"], ["ETH", "SOL"]]
    out: list[dict[str, Any]] = []
    interval = "1h"
    # Floor of 50 candles — presumably to give the test enough samples; confirm.
    limit = max(50, lookback_hours)
    async with async_client(timeout=15.0) as client:
        for pair in pairs:
            if len(pair) != 2:
                continue
            a, b = pair[0].upper(), pair[1].upper()
            sym_a = f"{a}USDT"
            sym_b = f"{b}USDT"
            try:
                resp_a = await client.get(
                    "https://api.binance.com/api/v3/klines",
                    params={"symbol": sym_a, "interval": interval, "limit": limit},
                )
                resp_b = await client.get(
                    "https://api.binance.com/api/v3/klines",
                    params={"symbol": sym_b, "interval": interval, "limit": limit},
                )
                if resp_a.status_code != 200 or resp_b.status_code != 200:
                    continue
                closes_a = [float(k[4]) for k in resp_a.json()]  # index 4 = close price in Binance klines
                closes_b = [float(k[4]) for k in resp_b.json()]
                if len(closes_a) != len(closes_b):
                    # Align tails when the two series come back with unequal lengths.
                    n = min(len(closes_a), len(closes_b))
                    closes_a = closes_a[-n:]
                    closes_b = closes_b[-n:]
                result = cointegration_test(closes_a, closes_b)
                out.append({
                    "pair": [a, b],
                    "samples": len(closes_a),
                    **result,
                })
            except Exception as e:
                # A failed pair is reported inline rather than aborting the batch.
                out.append({"pair": [a, b], "error": str(e)})
    out.sort(key=lambda r: r.get("adf_t_stat") or 0)
    return {
        "results": out,
        "lookback_hours": lookback_hours,
    }
async def fetch_world_news() -> dict[str, Any]:
    """Collect world financial headlines from the configured free RSS feeds.

    Each feed contributes at most 5 items; a feed that fails to download or
    parse is skipped silently. Summaries are stripped of HTML tags and
    truncated to 200 characters.
    """
    articles: list[dict[str, Any]] = []
    async with async_client(timeout=10.0, follow_redirects=True) as client:
        for source_name, url in WORLD_NEWS_FEEDS:
            try:
                resp = await client.get(url)
                if resp.status_code != 200:
                    continue
                channel = ET.fromstring(resp.text)
                for entry in channel.findall(".//item")[:5]:
                    summary = entry.findtext("description", "")
                    if "<" in summary:
                        # Strip embedded HTML markup from the description.
                        summary = re.sub(r"<[^>]+>", "", summary).strip()
                    articles.append(
                        {
                            "source": source_name,
                            "title": entry.findtext("title", ""),
                            "url": entry.findtext("link", ""),
                            "published": entry.findtext("pubDate", ""),
                            "summary": summary[:200] if summary else "",
                        }
                    )
            except Exception:
                continue
    return {"articles": articles, "count": len(articles)}
async def fetch_oi_history(asset: str = "BTC", period: str = "5m", limit: int = 288) -> dict[str, Any]:
    """Perpetual open-interest history from Binance futures (public endpoint).

    period: 5m|15m|30m|1h|2h|4h|6h|12h|1d (Binance API).
    limit: 1..500, default 288 = 24h at 5min.

    Returns the raw points plus current OI and 1h/4h/24h percentage deltas
    (None when not enough history is available).
    """
    asset = asset.upper()
    symbol = f"{asset}USDT"
    limit = max(1, min(int(limit), 500))  # clamp to the endpoint's accepted range
    points: list[dict[str, Any]] = []
    try:
        async with async_client(timeout=10.0) as client:
            resp = await client.get(
                BINANCE_OI_HIST_URL,
                params={"symbol": symbol, "period": period, "limit": limit},
            )
            if resp.status_code == 200:
                for row in resp.json() or []:
                    points.append(
                        {
                            "timestamp": int(row.get("timestamp", 0)),  # epoch millis (see cutoff math below)
                            "oi": float(row.get("sumOpenInterest", 0)),
                            "oi_value_usd": float(row.get("sumOpenInterestValue", 0)),
                        }
                    )
    except Exception:
        pass
    def _delta_pct(points: list[dict[str, Any]], minutes_back: int) -> float | None:
        # % change of OI vs the newest point at least `minutes_back` minutes old.
        if len(points) < 2:
            return None
        current = points[-1]
        cutoff_ts = current["timestamp"] - minutes_back * 60 * 1000
        past = next((p for p in reversed(points) if p["timestamp"] <= cutoff_ts), None)
        if past is None or past["oi"] == 0:
            return None
        return round(100.0 * (current["oi"] - past["oi"]) / past["oi"], 3)
    return {
        "asset": asset,
        "exchange": "binance",
        "symbol": symbol,
        "period": period,
        "points": points,
        "current_oi": points[-1]["oi"] if points else None,
        "current_oi_value_usd": points[-1]["oi_value_usd"] if points else None,
        "delta_pct_1h": _delta_pct(points, 60),
        "delta_pct_4h": _delta_pct(points, 240),
        "delta_pct_24h": _delta_pct(points, 1440),
        "data_points": len(points),
    }
@@ -1,174 +0,0 @@
from __future__ import annotations
import logging
import os
from fastapi import Depends, FastAPI, HTTPException
from mcp_common.auth import Principal, TokenStore, require_principal
from mcp_common.mcp_bridge import mount_mcp_endpoint
from mcp_common.server import build_app
from pydantic import BaseModel
from mcp_sentiment.fetchers import (
fetch_cointegration_pairs,
fetch_cross_exchange_funding,
fetch_crypto_news,
fetch_funding_arb_spread,
fetch_funding_rates,
fetch_liquidation_heatmap,
fetch_oi_history,
fetch_social_sentiment,
fetch_world_news,
)
logger = logging.getLogger(__name__)
# --- Body models ---
# Request body for /tools/get_crypto_news.
class GetCryptoNewsReq(BaseModel):
    limit: int = 20
# Request body for /tools/get_social_sentiment.
class GetSocialSentimentReq(BaseModel):
    symbol: str = "BTC"
# Request body for /tools/get_funding_rates.
class GetFundingRatesReq(BaseModel):
    asset: str = "BTC"
# Empty body: /tools/get_world_news takes no parameters.
class GetWorldNewsReq(BaseModel):
    pass
# assets=None → the fetcher uses its default asset list.
class GetCrossExchangeFundingReq(BaseModel):
    assets: list[str] | None = None
# assets=None → the fetcher uses its default asset list.
class GetFundingArbSpreadReq(BaseModel):
    assets: list[str] | None = None
# Request body for /tools/get_liquidation_heatmap.
class GetLiquidationHeatmapReq(BaseModel):
    asset: str = "BTC"
# pairs: list of [base, quote]; None → the fetcher's defaults.
class GetCointegrationPairsReq(BaseModel):
    pairs: list[list[str]] | None = None
    lookback_hours: int = 24
# Mirrors fetch_oi_history's parameters.
class GetOiHistoryReq(BaseModel):
    asset: str = "BTC"
    period: str = "5m"
    limit: int = 288
# --- ACL helper ---
def _check(principal: Principal, *, core: bool = False, observer: bool = False) -> None:
    """Raise 403 unless the principal holds at least one of the enabled capabilities."""
    allowed: set[str] = {
        name for name, enabled in (("core", core), ("observer", observer)) if enabled
    }
    if principal.capabilities.isdisjoint(allowed):
        raise HTTPException(403, f"capability required: {allowed}")
# --- App factory ---
def create_app(*, cryptopanic_key: str = "", token_store: TokenStore) -> FastAPI:
    """App factory: build the FastAPI app and register all read-only /tools/* endpoints.

    Every tool allows both `core` and `observer` capabilities. A missing or
    placeholder cryptopanic_key only triggers a startup warning — the
    endpoints still work.
    """
    app = build_app(name="mcp-sentiment", version="0.1.0", token_store=token_store)
    if not cryptopanic_key or cryptopanic_key.lower() in ("placeholder", "none", "changeme"):
        logger.warning(
            "mcp-sentiment: cryptopanic_key mancante o placeholder — get_crypto_news "
            "ritornerà headlines=[] con note diagnostica"
        )
    @app.post("/tools/get_crypto_news", tags=["reads"])
    async def t_get_crypto_news(
        body: GetCryptoNewsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_crypto_news(api_key=cryptopanic_key, limit=body.limit)
    @app.post("/tools/get_social_sentiment", tags=["reads"])
    async def t_get_social_sentiment(
        body: GetSocialSentimentReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_social_sentiment(body.symbol)
    @app.post("/tools/get_funding_rates", tags=["reads"])
    async def t_get_funding_rates(
        body: GetFundingRatesReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_funding_rates(body.asset)
    @app.post("/tools/get_world_news", tags=["reads"])
    async def t_get_world_news(
        body: GetWorldNewsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_world_news()
    @app.post("/tools/get_cross_exchange_funding", tags=["reads"])
    async def t_get_cross_exchange_funding(
        body: GetCrossExchangeFundingReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cross_exchange_funding(body.assets)
    @app.post("/tools/get_funding_arb_spread", tags=["reads"])
    async def t_get_funding_arb_spread(
        body: GetFundingArbSpreadReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_funding_arb_spread(body.assets)
    @app.post("/tools/get_liquidation_heatmap", tags=["reads"])
    async def t_get_liquidation_heatmap(
        body: GetLiquidationHeatmapReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_liquidation_heatmap(body.asset)
    @app.post("/tools/get_cointegration_pairs", tags=["reads"])
    async def t_get_cointegration_pairs(
        body: GetCointegrationPairsReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_cointegration_pairs(body.pairs, body.lookback_hours)
    @app.post("/tools/get_oi_history", tags=["reads"])
    async def t_get_oi_history(
        body: GetOiHistoryReq, principal: Principal = Depends(require_principal)
    ):
        _check(principal, core=True, observer=True)
        return await fetch_oi_history(body.asset, body.period, body.limit)
    # ───── MCP endpoint (/mcp) — bridge to the /tools/* handlers ─────
    port = int(os.environ.get("PORT", "9014"))
    mount_mcp_endpoint(
        app,
        name="cerbero-sentiment",
        version="0.1.0",
        token_store=token_store,
        internal_base_url=f"http://localhost:{port}",
        tools=[
            {"name": "get_crypto_news", "description": "News crypto da CryptoPanic."},
            {"name": "get_social_sentiment", "description": "Sentiment aggregato social."},
            {"name": "get_funding_rates", "description": "Funding rates aggregati."},
            {"name": "get_world_news", "description": "News macro/world."},
            {"name": "get_cross_exchange_funding", "description": "Funding multi-asset multi-exchange + arbitrage opportunities."},
            {"name": "get_oi_history", "description": "Open interest history perp (Binance) + delta_pct 1h/4h/24h."},
            {"name": "get_funding_arb_spread", "description": "Opportunità arbitrage funding cross-exchange in formato compatto + annualized %."},
            {"name": "get_liquidation_heatmap", "description": "Pressione liquidazioni heuristica da OI delta + funding (long/short squeeze risk)."},
            {"name": "get_cointegration_pairs", "description": "Engle-Granger cointegration test su coppie crypto Binance hourly."},
        ],
    )
    return app
@@ -1,320 +0,0 @@
from __future__ import annotations
import httpx
import pytest
import pytest_httpx
from mcp_sentiment.fetchers import (
fetch_crypto_news,
fetch_funding_rates,
fetch_social_sentiment,
fetch_world_news,
)
# --- CER-017 multi-source news aggregator ---
# Minimal CoinDesk RSS fixture: two items, one sharing "Common headline"
# with the CryptoCompare mock so dedup can be asserted.
_COINDESK_RSS = (
    '<?xml version="1.0"?><rss><channel>'
    "<item><title>ETH rally</title><link>https://coindesk.com/eth</link>"
    "<pubDate>2026-04-19</pubDate></item>"
    "<item><title>Common headline</title><link>https://coindesk.com/x</link>"
    "<pubDate>2026-04-18</pubDate></item>"
    "</channel></rss>"
)
def _mock_three_providers(httpx_mock: pytest_httpx.HTTPXMock, *, cc_items=None, messari_items=None):
    """Register mock responses for CoinDesk, CryptoCompare and Messari.

    cc_items / messari_items override the default payloads; the defaults share
    the "Common headline" title with the CoinDesk fixture to exercise dedup.
    """
    httpx_mock.add_response(url="https://www.coindesk.com/arc/outboundfeeds/rss/", text=_COINDESK_RSS)
    httpx_mock.add_response(
        url="https://min-api.cryptocompare.com/data/v2/news/?lang=EN",
        json={"Data": cc_items if cc_items is not None else [
            {"title": "BTC ATH", "source": "CryptoCompare", "published_on": 1761868800, "url": "https://x/1"},
            {"title": "Common headline", "source": "Reuters", "published_on": 1761782400, "url": "https://x/2"},
        ]},
    )
    httpx_mock.add_response(
        url="https://data.messari.io/api/v1/news",
        json={"data": messari_items if messari_items is not None else [
            {"title": "SOL rally", "author": {"name": "Messari"}, "published_at": "2026-04-19T10:00:00Z", "url": "https://x/3"},
        ]},
    )
@pytest.mark.asyncio
async def test_crypto_news_aggregates_three_sources(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: CoinDesk + CryptoCompare + Messari are fetched in parallel."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    titles = {h["title"] for h in result["headlines"]}
    assert "ETH rally" in titles
    assert "BTC ATH" in titles
    assert "SOL rally" in titles
    assert set(result["sources"]) == {"coindesk", "cryptocompare", "messari"}
    assert result["sources_failed"] == []
@pytest.mark.asyncio
async def test_crypto_news_dedup_by_title(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: the same title on 2 providers → a single entry."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    common_count = sum(1 for h in result["headlines"] if h["title"].lower() == "common headline")
    assert common_count == 1
    assert result["total_before_dedup"] > result["total_after_dedup"]
@pytest.mark.asyncio
async def test_crypto_news_partial_failure(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: one provider returns 500 → the others continue; sources_failed reports it."""
    httpx_mock.add_response(url="https://www.coindesk.com/arc/outboundfeeds/rss/", text=_COINDESK_RSS)
    httpx_mock.add_response(
        url="https://min-api.cryptocompare.com/data/v2/news/?lang=EN",
        status_code=500,
    )
    httpx_mock.add_response(
        url="https://data.messari.io/api/v1/news",
        json={"data": [{"title": "OK Messari", "author": {"name": "M"}, "published_at": "2026-04-19T10:00:00Z", "url": "https://x"}]},
    )
    result = await fetch_crypto_news(limit=20)
    assert "cryptocompare" in result["sources_failed"]
    assert "coindesk" in result["sources"]
    assert "messari" in result["sources"]
@pytest.mark.asyncio
async def test_crypto_news_sorted_desc_by_date(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: headlines come back ordered by published_at DESC."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    dates = [h.get("published_at") or "" for h in result["headlines"] if h.get("published_at")]
    assert dates == sorted(dates, reverse=True)
@pytest.mark.asyncio
async def test_crypto_news_with_cryptopanic_key(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: with an api_key, Cryptopanic is included as a 4th source."""
    _mock_three_providers(httpx_mock)
    httpx_mock.add_response(
        url=httpx.URL("https://cryptopanic.com/api/v1/posts/", params={"auth_token": "k", "public": "true"}),
        json={"results": [{
            "title": "Cryptopanic exclusive",
            "source": {"title": "CP"},
            "published_at": "2026-04-20T00:00:00Z",
            "url": "https://x/cp",
        }]},
    )
    result = await fetch_crypto_news(api_key="k", limit=20)
    titles = {h["title"] for h in result["headlines"]}
    assert "Cryptopanic exclusive" in titles
    assert "cryptopanic" in result["sources"]
@pytest.mark.asyncio
async def test_crypto_news_placeholder_key_skips_cryptopanic(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: a placeholder api_key → no Cryptopanic call at all."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(api_key="placeholder", limit=20)
    assert "cryptopanic" not in result["sources"]
    assert "cryptopanic" not in result["sources_failed"]
@pytest.mark.asyncio
async def test_crypto_news_provider_tracing(httpx_mock: pytest_httpx.HTTPXMock):
    """CER-017: every headline carries a provider field."""
    _mock_three_providers(httpx_mock)
    result = await fetch_crypto_news(limit=20)
    for h in result["headlines"]:
        assert h.get("provider") in {"coindesk", "cryptocompare", "messari"}
# --- fetch_social_sentiment ---
@pytest.mark.asyncio
async def test_social_sentiment_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Happy path: fear&greed value and label land in the result."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.alternative.me/fng/",
            params={"limit": "1"},
        ),
        json={"data": [{"value": "72", "value_classification": "Greed"}]},
    )
    result = await fetch_social_sentiment()
    assert result["fear_greed_index"] == 72
    assert result["fear_greed_label"] == "Greed"
    assert "social_volume" in result
@pytest.mark.asyncio
async def test_social_sentiment_empty_data(httpx_mock: pytest_httpx.HTTPXMock):
    """Empty provider payload → zeroed index and empty label, no crash."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.alternative.me/fng/",
            params={"limit": "1"},
        ),
        json={"data": []},
    )
    result = await fetch_social_sentiment()
    assert result["fear_greed_index"] == 0
    assert result["fear_greed_label"] == ""
@pytest.mark.asyncio
async def test_social_sentiment_derives_proxy_from_fng(
    httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
    """CER-P2-005: without LUNARCRUSH_API_KEY, twitter/reddit derive from F&G."""
    monkeypatch.delenv("LUNARCRUSH_API_KEY", raising=False)
    httpx_mock.add_response(
        url=httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"}),
        json={"data": [{"value": "25", "value_classification": "Extreme Fear"}]},
    )
    result = await fetch_social_sentiment()
    assert result["twitter_sentiment"] == pytest.approx(-0.5)
    assert result["reddit_sentiment"] == pytest.approx(-0.5)
    assert result["derived"] is True
    assert result["source"] == "fear_greed_only"
@pytest.mark.asyncio
async def test_social_sentiment_uses_lunarcrush_when_key_present(
httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
"""CER-P2-005: con LUNARCRUSH_API_KEY, valori reali."""
monkeypatch.setenv("LUNARCRUSH_API_KEY", "test-key")
httpx_mock.add_response(
url=httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"}),
json={"data": [{"value": "50", "value_classification": "Neutral"}]},
)
httpx_mock.add_response(
url="https://lunarcrush.com/api4/public/coins/BTC/v1",
json={"data": {
"sentiment": 80,
"galaxy_score": 75,
"alt_rank": 3,
"social_volume_24h": 12345,
"social_dominance": 25.5,
}},
)
result = await fetch_social_sentiment("BTC")
assert result["twitter_sentiment"] == pytest.approx(0.6)
assert result["reddit_sentiment"] == pytest.approx(0.6)
assert result["social_volume"] == 12345
assert result["galaxy_score"] == 75
assert result["derived"] is False
assert "lunarcrush" in result["source"]
@pytest.mark.asyncio
async def test_social_sentiment_lunarcrush_failure_fallback_to_proxy(
    httpx_mock: pytest_httpx.HTTPXMock, monkeypatch
):
    """CER-P2-005: if LunarCrush fails, fall back to the F&G proxy — no crash."""
    monkeypatch.setenv("LUNARCRUSH_API_KEY", "broken-key")
    fng_url = httpx.URL("https://api.alternative.me/fng/", params={"limit": "1"})
    httpx_mock.add_response(
        url=fng_url,
        json={"data": [{"value": "75", "value_classification": "Greed"}]},
    )
    # LunarCrush rejects the key — the client must degrade gracefully.
    httpx_mock.add_response(
        url="https://lunarcrush.com/api4/public/coins/BTC/v1",
        status_code=401,
        json={"error": "unauthorized"},
    )

    sentiment = await fetch_social_sentiment("BTC")

    assert sentiment["twitter_sentiment"] == pytest.approx(0.5)
    assert sentiment["derived"] is True
    assert sentiment["source"] == "fear_greed_only"
# --- fetch_funding_rates ---
@pytest.mark.asyncio
async def test_funding_rates_all_exchanges(httpx_mock: pytest_httpx.HTTPXMock):
    """When Binance, Bybit and OKX all respond, each appears in the result."""
    httpx_mock.add_response(
        url=httpx.URL(
            "https://fapi.binance.com/fapi/v1/premiumIndex",
            params={"symbol": "BTCUSDT"},
        ),
        json={"lastFundingRate": "0.0001", "nextFundingTime": 1700000000000},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.bybit.com/v5/market/tickers",
            params={"category": "linear", "symbol": "BTCUSDT"},
        ),
        json={"result": {"list": [{"fundingRate": "0.0002", "nextFundingTime": "1700000000000"}]}},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://www.okx.com/api/v5/public/funding-rate",
            params={"instId": "BTC-USDT-SWAP"},
        ),
        json={"data": [{"fundingRate": "0.00015", "nextFundingTime": "1700000000000"}]},
    )

    payload = await fetch_funding_rates()

    assert "rates" in payload
    seen = {entry["exchange"] for entry in payload["rates"]}
    for name in ("binance", "bybit", "okx"):
        assert name in seen
@pytest.mark.asyncio
async def test_funding_rates_partial_failure(httpx_mock: pytest_httpx.HTTPXMock):
    """If some exchanges fail, results from the healthy ones are still returned."""
    # Only Binance succeeds; Bybit and OKX answer with server errors.
    httpx_mock.add_response(
        url=httpx.URL(
            "https://fapi.binance.com/fapi/v1/premiumIndex",
            params={"symbol": "BTCUSDT"},
        ),
        json={"lastFundingRate": "0.0001", "nextFundingTime": 1700000000000},
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://api.bybit.com/v5/market/tickers",
            params={"category": "linear", "symbol": "BTCUSDT"},
        ),
        status_code=500,
    )
    httpx_mock.add_response(
        url=httpx.URL(
            "https://www.okx.com/api/v5/public/funding-rate",
            params={"instId": "BTC-USDT-SWAP"},
        ),
        status_code=500,
    )

    payload = await fetch_funding_rates()

    rates = payload["rates"]
    assert len(rates) == 1
    assert rates[0]["exchange"] == "binance"
# --- fetch_world_news ---
@pytest.mark.asyncio
async def test_world_news_happy(httpx_mock: pytest_httpx.HTTPXMock):
    """Each of the four RSS feeds returns one item, so the aggregate has 4.

    All feed URLs are stubbed with the same single-item RSS document; the
    aggregator should surface one article per source.
    """
    rss_xml = """<?xml version="1.0"?>
<rss version="2.0"><channel>
<item><title>Markets rally</title><link>http://example.com/1</link><pubDate>Mon, 15 Jan 2024 10:00:00 +0000</pubDate><description>Stocks up</description></item>
</channel></rss>"""
    # Iterate the URLs directly — the feed display names were never used.
    feed_urls = [
        "https://feeds.reuters.com/reuters/businessNews",
        "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114",
        "https://feeds.bloomberg.com/markets/news.rss",
        "https://www.coindesk.com/arc/outboundfeeds/rss/",
    ]
    for url in feed_urls:
        httpx_mock.add_response(url=url, text=rss_xml)
    result = await fetch_world_news()
    assert result["count"] == 4
    assert result["articles"][0]["title"] == "Markets rally"
@pytest.mark.asyncio
async def test_world_news_all_fail(httpx_mock: pytest_httpx.HTTPXMock):
    """When every feed returns 503, the aggregator yields an empty result.

    Verifies graceful degradation: no exception, empty article list, zero count.
    """
    # Iterate the URLs directly — the feed display names were never used.
    feed_urls = [
        "https://feeds.reuters.com/reuters/businessNews",
        "https://search.cnbc.com/rs/search/combinedcms/view.xml?partnerId=wrss01&id=100003114",
        "https://feeds.bloomberg.com/markets/news.rss",
        "https://www.coindesk.com/arc/outboundfeeds/rss/",
    ]
    for url in feed_urls:
        httpx_mock.add_response(url=url, status_code=503)
    result = await fetch_world_news()
    assert result["articles"] == []
    assert result["count"] == 0
@@ -1,216 +0,0 @@
from __future__ import annotations
from unittest.mock import AsyncMock, patch
import pytest
from fastapi.testclient import TestClient
from mcp_common.auth import Principal, TokenStore
from mcp_sentiment.server import create_app
@pytest.fixture
def http():
    """TestClient over the sentiment app with a core ("ct") and observer ("ot") token."""
    principals = {
        "ct": Principal("core", {"core"}),
        "ot": Principal("observer", {"observer"}),
    }
    app = create_app(
        cryptopanic_key="testkey",
        token_store=TokenStore(tokens=principals),
    )
    return TestClient(app)
# --- Health ---
def test_health(http):
    """The health probe responds 200 without authentication."""
    response = http.get("/health")
    assert response.status_code == 200
# --- get_crypto_news ---
def test_get_crypto_news_core_ok(http):
    """A core-scoped token may call get_crypto_news; the fetch is stubbed."""
    stub = AsyncMock(return_value={"headlines": []})
    with patch("mcp_sentiment.server.fetch_crypto_news", new=stub):
        response = http.post(
            "/tools/get_crypto_news",
            headers={"Authorization": "Bearer ct"},
            json={"limit": 5},
        )
        assert response.status_code == 200
def test_get_crypto_news_observer_ok(http):
    """An observer-scoped token may also call get_crypto_news."""
    stub = AsyncMock(return_value={"headlines": []})
    with patch("mcp_sentiment.server.fetch_crypto_news", new=stub):
        response = http.post(
            "/tools/get_crypto_news",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert response.status_code == 200
def test_get_crypto_news_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_crypto_news", json={}).status_code == 401
# --- get_social_sentiment ---
def test_get_social_sentiment_core_ok(http):
    """Core token gets 200 and the stubbed sentiment payload passes through."""
    stub = AsyncMock(return_value={"fear_greed_index": 65, "fear_greed_label": "Greed"})
    with patch("mcp_sentiment.server.fetch_social_sentiment", new=stub):
        response = http.post(
            "/tools/get_social_sentiment",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert response.status_code == 200
        assert response.json()["fear_greed_index"] == 65
def test_get_social_sentiment_observer_ok(http):
    """Observer token is also allowed to read social sentiment."""
    stub = AsyncMock(return_value={"fear_greed_index": 65})
    with patch("mcp_sentiment.server.fetch_social_sentiment", new=stub):
        response = http.post(
            "/tools/get_social_sentiment",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert response.status_code == 200
def test_get_social_sentiment_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_social_sentiment", json={}).status_code == 401
# --- get_funding_rates ---
def test_get_funding_rates_core_ok(http):
    """Core token may call get_funding_rates; the fetch is stubbed."""
    stub = AsyncMock(return_value={"rates": []})
    with patch("mcp_sentiment.server.fetch_funding_rates", new=stub):
        response = http.post(
            "/tools/get_funding_rates",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert response.status_code == 200
def test_get_funding_rates_observer_ok(http):
    """Observer token may also call get_funding_rates."""
    stub = AsyncMock(return_value={"rates": []})
    with patch("mcp_sentiment.server.fetch_funding_rates", new=stub):
        response = http.post(
            "/tools/get_funding_rates",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert response.status_code == 200
def test_get_funding_rates_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_funding_rates", json={}).status_code == 401
# --- get_world_news ---
def test_get_world_news_core_ok(http):
    """Core token may call get_world_news; the fetch is stubbed."""
    stub = AsyncMock(return_value={"articles": [], "count": 0})
    with patch("mcp_sentiment.server.fetch_world_news", new=stub):
        response = http.post(
            "/tools/get_world_news",
            headers={"Authorization": "Bearer ct"},
            json={},
        )
        assert response.status_code == 200
def test_get_world_news_observer_ok(http):
    """Observer token may also call get_world_news."""
    stub = AsyncMock(return_value={"articles": [], "count": 0})
    with patch("mcp_sentiment.server.fetch_world_news", new=stub):
        response = http.post(
            "/tools/get_world_news",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert response.status_code == 200
def test_get_world_news_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_world_news", json={}).status_code == 401
# --- New indicators: funding_arb_spread, liquidation_heatmap, cointegration_pairs ---
def test_get_funding_arb_spread_ok(http):
    """Observer token may call get_funding_arb_spread; the fetch is stubbed."""
    stub = AsyncMock(return_value={"opportunities": []})
    with patch("mcp_sentiment.server.fetch_funding_arb_spread", new=stub):
        response = http.post(
            "/tools/get_funding_arb_spread",
            headers={"Authorization": "Bearer ot"},
            json={},
        )
        assert response.status_code == 200
def test_get_funding_arb_spread_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_funding_arb_spread", json={}).status_code == 401
def test_get_liquidation_heatmap_ok(http):
    """Core token may call get_liquidation_heatmap for an asset; fetch stubbed."""
    stub = AsyncMock(return_value={"asset": "BTC", "long_squeeze_risk": "low"})
    with patch("mcp_sentiment.server.fetch_liquidation_heatmap", new=stub):
        response = http.post(
            "/tools/get_liquidation_heatmap",
            headers={"Authorization": "Bearer ct"},
            json={"asset": "BTC"},
        )
        assert response.status_code == 200
def test_get_liquidation_heatmap_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    response = http.post("/tools/get_liquidation_heatmap", json={"asset": "BTC"})
    assert response.status_code == 401
def test_get_cointegration_pairs_ok(http):
    """Observer token may call get_cointegration_pairs; the fetch is stubbed."""
    stub = AsyncMock(return_value={"results": []})
    with patch("mcp_sentiment.server.fetch_cointegration_pairs", new=stub):
        response = http.post(
            "/tools/get_cointegration_pairs",
            headers={"Authorization": "Bearer ot"},
            json={"pairs": [["BTC", "ETH"]]},
        )
        assert response.status_code == 200
def test_get_cointegration_pairs_no_auth_401(http):
    """A missing bearer token is rejected with 401."""
    assert http.post("/tools/get_cointegration_pairs", json={}).status_code == 401