ce158a92dd
Adegua Cerbero Bite alla nuova versione 2.0.0 del server MCP unificato (testnet/mainnet routing per token, header X-Bot-Tag obbligatorio) e introduce due interruttori operativi indipendenti per separare la raccolta dati dall'esecuzione di strategia. Auth e collegamento MCP - Token bearer letto dalla nuova variabile CERBERO_BITE_MCP_TOKEN; il valore sceglie l'ambiente upstream (testnet vs mainnet) sul server. Rimosso il caricamento da file (`secrets/core.token`, CERBERO_BITE_CORE_TOKEN_FILE, Docker secret /run/secrets/core_token). - Aggiunto header X-Bot-Tag (default `BOT__CERBERO_BITE`, override via CERBERO_BITE_MCP_BOT_TAG) su ogni call MCP, con validazione lato client (non vuoto, ≤ 64 caratteri). - Cartella `secrets/` rimossa, `.gitignore` ripulito, Dockerfile e docker-compose.yml aggiornati con env passthrough e fail-fast quando manca il token. Modalità operativa (RuntimeFlags) - Nuovo modulo `config/runtime_flags.py` con `RuntimeFlags( data_analysis_enabled, strategy_enabled)` e loader che parserizza CERBERO_BITE_ENABLE_DATA_ANALYSIS e CERBERO_BITE_ENABLE_STRATEGY (true/false/yes/no/on/off/enabled/disabled, case-insensitive). - L'orchestratore espone i flag, audita e logga la modalità al boot (`engine started: env=… data_analysis=… strategy=…`), e in `install_scheduler` esclude i job `entry`/`monitor` quando strategy è off e il job `market_snapshot` quando data analysis è off. I job di infrastruttura (health, backup, manual_actions) restano sempre attivi. - Default profile = "solo analisi dati" (data_analysis=true, strategy=false), pensato per la finestra di soak post-deploy. GUI saldi - `gui/live_data.py::_fetch_deribit_currency` riconosce il campo soft `error` nel payload V2 (HTTP 200 con `error` valorizzato dal server quando l'auth Deribit fallisce) e lo propaga come `BalanceRow.error`, evitando di mostrare un fuorviante equity = 0,00. 
CLI - Sostituita l'opzione `--token-file` con `--token` (stringa) sui comandi start/dry-run/ping; il default proviene dall'env. Le chiamate al builder dell'orchestrator passano anche `bot_tag` e `flags`. Documentazione - `docs/04-mcp-integration.md`: descrizione del nuovo flusso di auth V2 (token = ambiente, X-Bot-Tag nell'audit) e router unificati. - `docs/06-operational-flow.md`: nuova sezione "Modalità operativa" con i tre profili canonici e tabella di gating per ogni job; aggiunto `market_snapshot` al cron summary. - `docs/10-config-spec.md`: nuova sezione "Variabili d'ambiente" tabellare con tutti gli env, comprese le bool dei flag operativi. - `docs/02-architecture.md`: layout del repo aggiornato (`secrets/` rimosso, `runtime_flags.py` aggiunto), descrizione di `config/` estesa. Test - 5 nuovi test su `_fetch_deribit_currency` (soft-error, payload pulito, eccezione, error blank, signature parity). - 7 nuovi test su `load_runtime_flags` (default, override, parsing truthy/falsy, blank fallback, valore invalido). - 4 nuovi test su `HttpToolClient` (X-Bot-Tag default e custom, blank e troppo lungo rifiutati). - 3 nuovi test integration sull'orchestratore (gating dei job in base ai flag). - Test esistenti su token/CLI ping/orchestrator aggiornati al nuovo schema. Suite intera: 404 passed, 1 skipped (sqlite3 CLI assente sull'host di sviluppo). Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
191 lines
5.9 KiB
Python
191 lines
5.9 KiB
Python
"""Tests for :class:`HttpToolClient`."""
|
|
|
|
from __future__ import annotations
|
|
|
|
import httpx
|
|
import pytest
|
|
from pytest_httpx import HTTPXMock
|
|
|
|
from cerbero_bite.clients._base import HttpToolClient
|
|
from cerbero_bite.clients._exceptions import (
|
|
McpAuthError,
|
|
McpNotFoundError,
|
|
McpServerError,
|
|
McpTimeoutError,
|
|
McpToolError,
|
|
)
|
|
|
|
|
|
def _make_client(**overrides: object) -> HttpToolClient:
    """Build an ``HttpToolClient`` with sane test defaults, applying *overrides*."""
    kwargs: dict[str, object] = {
        "service": "test",
        "base_url": "http://mcp-test:9000",
        "token": "tok",
        "retry_max": 1,
        **overrides,
    }
    return HttpToolClient(**kwargs)  # type: ignore[arg-type]
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_returns_parsed_payload(httpx_mock: HTTPXMock) -> None:
    """A successful tool call returns the decoded JSON body unchanged."""
    httpx_mock.add_response(
        url="http://mcp-test:9000/tools/get_thing",
        json={"result": [1, 2, 3]},
    )
    payload = await _make_client().call("get_thing", {"x": 1})
    assert payload == {"result": [1, 2, 3]}
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_attaches_bearer_token(httpx_mock: HTTPXMock) -> None:
    """Every request carries the bearer token, JSON content type and bot tag."""
    httpx_mock.add_response(json={"ok": True})
    await _make_client(token="abc123").call("any")
    sent = httpx_mock.get_request()
    assert sent is not None
    headers = sent.headers
    assert headers["Authorization"] == "Bearer abc123"
    assert headers["Content-Type"] == "application/json"
    # Default bot tag is sent on every request.
    assert headers["X-Bot-Tag"] == "BOT__CERBERO_BITE"
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_attaches_custom_bot_tag(httpx_mock: HTTPXMock) -> None:
    """An explicit ``bot_tag`` overrides the default X-Bot-Tag header value."""
    httpx_mock.add_response(json={"ok": True})
    await _make_client(bot_tag="BOT__SHADOW").call("any")
    sent = httpx_mock.get_request()
    assert sent is not None
    assert sent.headers["X-Bot-Tag"] == "BOT__SHADOW"
|
|
|
|
|
|
def test_init_rejects_blank_bot_tag() -> None:
    """Constructing a client with a whitespace-only bot tag must fail."""
    blank_tag = " "
    with pytest.raises(ValueError, match="non-empty"):
        _make_client(bot_tag=blank_tag)
|
|
|
|
|
|
def test_init_rejects_too_long_bot_tag() -> None:
    """Bot tags longer than 64 characters are rejected at construction time."""
    oversized_tag = "x" * 65
    with pytest.raises(ValueError, match="64"):
        _make_client(bot_tag=oversized_tag)
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_auth_error_on_401(httpx_mock: HTTPXMock) -> None:
    """An HTTP 401 response surfaces as :class:`McpAuthError`."""
    httpx_mock.add_response(status_code=401, json={"detail": "nope"})
    with pytest.raises(McpAuthError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_auth_error_on_403(httpx_mock: HTTPXMock) -> None:
    """An HTTP 403 response surfaces as :class:`McpAuthError` as well."""
    httpx_mock.add_response(status_code=403, json={"detail": "forbidden"})
    with pytest.raises(McpAuthError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_not_found_on_404(httpx_mock: HTTPXMock) -> None:
    """An HTTP 404 response surfaces as :class:`McpNotFoundError`."""
    httpx_mock.add_response(status_code=404, text="missing")
    with pytest.raises(McpNotFoundError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_server_error_on_500(httpx_mock: HTTPXMock) -> None:
    """An HTTP 500 response surfaces as :class:`McpServerError`."""
    httpx_mock.add_response(status_code=500, text="boom")
    with pytest.raises(McpServerError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_tool_error_on_state_error_envelope(
    httpx_mock: HTTPXMock,
) -> None:
    """A 200 response carrying a ``state: error`` envelope raises McpToolError."""
    envelope = {"state": "error", "error": "bad params"}
    httpx_mock.add_response(json=envelope)
    with pytest.raises(McpToolError) as excinfo:
        await _make_client().call("any")
    # The error text and the full payload must both be preserved on the exception.
    assert "bad params" in str(excinfo.value)
    assert excinfo.value.payload == envelope
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_tool_error_on_4xx_other(httpx_mock: HTTPXMock) -> None:
    """Other 4xx statuses (e.g. 422) surface as :class:`McpToolError`."""
    httpx_mock.add_response(status_code=422, text="invalid")
    with pytest.raises(McpToolError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_server_error_when_response_not_json(
    httpx_mock: HTTPXMock,
) -> None:
    """A non-JSON body on an otherwise OK response raises McpServerError."""
    httpx_mock.add_response(content=b"not-json", headers={"content-type": "text/plain"})
    with pytest.raises(McpServerError, match="not JSON"):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_passes_through_list_response(httpx_mock: HTTPXMock) -> None:
    """Some MCP tools (e.g. portfolio.get_holdings) return a list."""
    httpx_mock.add_response(json=[1, 2, 3])
    result = await _make_client().call("any")
    assert result == [1, 2, 3]
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_raises_timeout(httpx_mock: HTTPXMock) -> None:
    """Transport-level read timeouts are wrapped in :class:`McpTimeoutError`."""
    httpx_mock.add_exception(httpx.ReadTimeout("slow"))
    with pytest.raises(McpTimeoutError):
        await _make_client().call("any")
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_retries_on_server_error_then_succeeds(
    httpx_mock: HTTPXMock,
) -> None:
    """A transient 500 is retried and the follow-up success payload is returned."""
    httpx_mock.add_response(status_code=500, text="down")
    httpx_mock.add_response(json={"ok": True})

    recorded_sleeps: list[float] = []

    async def _record_sleep(seconds: float) -> None:
        recorded_sleeps.append(seconds)

    client = _make_client(retry_max=3, sleep=_record_sleep)
    result = await client.call("any")
    assert result == {"ok": True}
    # one retry → at least one sleep recorded
    assert recorded_sleeps, "expected the retrier to sleep before retry"
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_call_does_not_retry_auth_error(httpx_mock: HTTPXMock) -> None:
    """Auth failures are terminal: exactly one request is made, no retries."""
    httpx_mock.add_response(status_code=401, json={"detail": "no"})
    client = _make_client(retry_max=3)
    with pytest.raises(McpAuthError):
        await client.call("any")
    # A 401 must not consume retry budget: only one request was sent.
    assert len(httpx_mock.get_requests()) == 1
|
|
|
|
|
|
@pytest.mark.asyncio
async def test_base_url_trailing_slash_is_stripped(httpx_mock: HTTPXMock) -> None:
    """Trailing slashes on ``base_url`` never produce double slashes in the URL."""
    httpx_mock.add_response(
        url="http://mcp-test:9000/tools/foo",
        json={"ok": True},
    )
    client = _make_client(base_url="http://mcp-test:9000///")
    await client.call("foo")