Phase 4: orchestrator + cycles auto-execute
Componente runtime/ che cabla core+clients+state+safety in un engine autonomo notify-only: nessuna conferma manuale, ordini combo piazzati direttamente quando le regole passano. 311 test pass, copertura totale 94%, runtime/ 90%, mypy strict pulito, ruff clean. Moduli: - runtime/alert_manager.py: escalation tree LOW/MEDIUM/HIGH/CRITICAL → audit + Telegram + kill switch. - runtime/dependencies.py: build_runtime() costruisce RuntimeContext con tutti i client MCP, repository, audit log, kill switch, alert manager. - runtime/entry_cycle.py: flusso settimanale (snapshot parallelo spot/dvol/funding/macro/holdings/equity → validate_entry → compute_bias → options_chain → select_strikes → liquidity_gate → sizing_engine → combo_builder.build → place_combo_order → notify_position_opened). - runtime/monitor_cycle.py: loop 12h con dvol_history per il return_4h, exit_decision.evaluate, close auto-execute. - runtime/health_check.py: probe parallelo MCP + SQLite + environment match; 3 strikes consecutivi → kill switch HIGH. - runtime/recovery.py: riconciliazione SQLite vs broker all'avvio; mismatch → kill switch CRITICAL. - runtime/scheduler.py: AsyncIOScheduler builder con cron entry (lun 14:00), monitor (02/14), health (5min). - runtime/orchestrator.py: façade boot() + run_entry/monitor/health + install_scheduler + run_forever, con env check vs strategy. CLI: - start: avvia engine bloccante (asyncio.run + scheduler). - dry-run --cycle entry|monitor|health: esegue un singolo ciclo per debug/test in produzione. - stop: documenta lo shutdown via SIGTERM al container. Documentazione: - docs/06-operational-flow.md riscritto per il modello notify-only auto-execute (no conferma manuale, no memory, no brain-bridge). Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,128 @@
"""Integration tests for the Orchestrator façade (boot + cycle wiring)."""

from __future__ import annotations

from datetime import UTC, datetime
from decimal import Decimal
from pathlib import Path

import pytest
from pytest_httpx import HTTPXMock

from cerbero_bite.config import golden_config
from cerbero_bite.config.mcp_endpoints import load_endpoints
from cerbero_bite.runtime import Orchestrator
from cerbero_bite.runtime.dependencies import build_runtime

# Each test wires only the stubs its scenario needs, so the module-wide
# "every mocked response must be requested" check is disabled here.
pytestmark = pytest.mark.httpx_mock(assert_all_responses_were_requested=False)
def _now() -> datetime:
|
||||
return datetime(2026, 4, 27, 14, 0, tzinfo=UTC)
|
||||
|
||||
|
||||
def _wire_environment_info(
|
||||
httpx_mock: HTTPXMock,
|
||||
*,
|
||||
environment: str = "testnet",
|
||||
) -> None:
|
||||
httpx_mock.add_response(
|
||||
url="http://mcp-deribit:9011/tools/environment_info",
|
||||
json={
|
||||
"exchange": "deribit",
|
||||
"environment": environment,
|
||||
"source": "env",
|
||||
"env_value": "true" if environment == "testnet" else "false",
|
||||
"base_url": "https://test.deribit.com/api/v2",
|
||||
"max_leverage": 3,
|
||||
},
|
||||
is_reusable=True,
|
||||
)
|
||||
|
||||
|
||||
def _wire_health_probes(httpx_mock: HTTPXMock) -> None:
|
||||
httpx_mock.add_response(
|
||||
url="http://mcp-macro:9013/tools/get_macro_calendar",
|
||||
json={"events": []},
|
||||
is_reusable=True,
|
||||
)
|
||||
httpx_mock.add_response(
|
||||
url="http://mcp-sentiment:9014/tools/get_cross_exchange_funding",
|
||||
json={"snapshot": {}},
|
||||
is_reusable=True,
|
||||
)
|
||||
httpx_mock.add_response(
|
||||
url="http://mcp-hyperliquid:9012/tools/get_funding_rate",
|
||||
json={"asset": "ETH", "current_funding_rate": 0.0001},
|
||||
is_reusable=True,
|
||||
)
|
||||
httpx_mock.add_response(
|
||||
url="http://mcp-portfolio:9018/tools/get_total_portfolio_value",
|
||||
json={"total_value_eur": 1000.0},
|
||||
is_reusable=True,
|
||||
)
|
||||
|
||||
|
||||
def _build_orch(tmp_path: Path, *, expected: str = "testnet") -> Orchestrator:
    """Assemble a runtime context rooted in *tmp_path* and wrap it in an Orchestrator."""
    runtime_ctx = build_runtime(
        cfg=golden_config(),
        endpoints=load_endpoints(env={}),
        token="t",
        db_path=tmp_path / "state.sqlite",
        audit_path=tmp_path / "audit.log",
        retry_max=1,
        clock=_now,
    )
    orchestrator = Orchestrator(
        runtime_ctx,
        expected_environment=expected,  # type: ignore[arg-type]
        eur_to_usd=Decimal("1.075"),
    )
    return orchestrator
@pytest.mark.asyncio
async def test_boot_succeeds_when_environment_matches(
    tmp_path: Path, httpx_mock: HTTPXMock
) -> None:
    """boot() against a matching testnet ends healthy with the kill switch off."""
    _wire_environment_info(httpx_mock, environment="testnet")
    _wire_health_probes(httpx_mock)
    # Startup reconciliation also asks the broker for open positions.
    httpx_mock.add_response(
        url="http://mcp-deribit:9011/tools/get_positions",
        json=[],
        is_reusable=True,
    )

    orch = _build_orch(tmp_path, expected="testnet")
    result = await orch.boot()

    assert result.environment == "testnet"
    assert result.health.state == "ok"
    assert orch.context.kill_switch.is_armed() is False
@pytest.mark.asyncio
async def test_boot_arms_kill_switch_on_environment_mismatch(
    tmp_path: Path, httpx_mock: HTTPXMock
) -> None:
    """Expecting testnet but finding mainnet must arm the kill switch."""
    _wire_environment_info(httpx_mock, environment="mainnet")
    _wire_health_probes(httpx_mock)
    # Mismatch path still reconciles positions and sends the Telegram alert.
    extra_stubs = (
        ("http://mcp-deribit:9011/tools/get_positions", []),
        ("http://mcp-telegram:9017/tools/notify_system_error", {"ok": True}),
    )
    for url, payload in extra_stubs:
        httpx_mock.add_response(url=url, json=payload, is_reusable=True)

    orch = _build_orch(tmp_path, expected="testnet")
    await orch.boot()

    assert orch.context.kill_switch.is_armed() is True
def test_install_scheduler_registers_canonical_jobs(tmp_path: Path) -> None:
    """install_scheduler() registers exactly the entry/monitor/health jobs."""
    scheduler = _build_orch(tmp_path).install_scheduler()
    registered = {job.id for job in scheduler.get_jobs()}
    assert registered == {"entry", "monitor", "health"}
Reference in New Issue
Block a user