Phase 2: persistence + safety controls

Adds SQLite persistence, the hash-chained audit log, the coordinated
kill switch and the management CLIs documented in
docs/05-data-model.md and docs/07-risk-controls.md. 197 tests pass,
1 skipped (sqlite3 CLI missing), total coverage 97%.

State (`state/`):
- 0001_init.sql with positions, instructions, decisions, dvol_history,
  manual_actions, system_state.
- db.py: connect with WAL + foreign_keys + a transaction context
  manager, plus a forward-only migration runner keyed on PRAGMA
  user_version.
- models.py: Pydantic records, Decimal preserved as TEXT.
- repository.py: typed CRUD over a single passed-in connection,
  cache-aware, concurrent-position counting (usage sketch below).
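
A minimal usage sketch of the state layer; the path and the
config_version value are illustrative, the API is the one added in
this commit:

    from datetime import UTC, datetime
    from cerbero_bite.state import Repository, connect, run_migrations, transaction

    conn = connect("data/state.sqlite")   # applies WAL + foreign_keys pragmas
    run_migrations(conn)                  # forward-only, keyed on PRAGMA user_version
    repo = Repository()
    with transaction(conn):
        repo.init_system_state(conn, config_version="v1", now=datetime.now(UTC))
    print(repo.count_concurrent_positions(conn))
    conn.close()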

Safety (`safety/`):
- audit_log.py: append-only AuditLog with a SHA-256 chain and fsync;
  verify_chain detects any tampering (payload, prev_hash, hash, JSON,
  separators).
- kill_switch.py: transactional, idempotent arm/disarm, coupled to the
  audit chain (sketch below).
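
Sketch of the audit flow; the event name and payload keys are the ones
used by the kill switch in this diff, the values are illustrative:

    from cerbero_bite.safety import AuditLog, verify_chain

    audit = AuditLog("data/audit.log")
    audit.append(event="KILL_SWITCH_ARMED",
                 payload={"reason": "drill", "source": "manual"})
    verify_chain("data/audit.log")   # raises AuditChainError on any tampering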

Config (`config/loader.py` + `strategy.yaml`):
- YAML loader with deep-merge of strategy.local.yaml.
- SHA-256 config_hash verification (the config_hash line itself is
  excluded from the digest).
- Golden strategy.yaml file plus an example override (sketch below).
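
Loader usage as exercised by the config CLI commands (sketch; per
docs/07-risk-controls.md a hash mismatch must arm the kill switch):

    from pathlib import Path
    from cerbero_bite.config import ConfigHashError, load_strategy

    try:
        # merges strategy.local.yaml if it sits next to the golden file
        loaded = load_strategy(Path("strategy.yaml"))
    except ConfigHashError:
        # the orchestrator arms the kill switch here
        raise
    print(loaded.config.config_version, loaded.computed_hash[:16])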

Scripts:
- dead_man.sh: shell watchdog, independent of Python.
- backup.py: hourly VACUUM INTO with 30-day retention (sketch below).
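
The backup relies on SQLite's built-in VACUUM INTO; stripped-down
sketch only (target directory and filename pattern are assumptions,
retention pruning omitted):

    import sqlite3
    from datetime import UTC, datetime
    from pathlib import Path

    Path("backups").mkdir(exist_ok=True)                      # assumed target dir
    stamp = datetime.now(UTC).strftime("%Y%m%dT%H%M%SZ")
    conn = sqlite3.connect("data/state.sqlite")
    try:
        # VACUUM INTO writes a compacted copy of the live database
        conn.execute("VACUUM INTO ?", (f"backups/state-{stamp}.sqlite",))
    finally:
        conn.close()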

CLI:
- audit verify (exit 2 on tampering).
- kill-switch arm/disarm/status against the real SQLite file.
- state inspect with a table of open positions.
- config hash, config validate (smoke-test sketch below).
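
Smoke-test sketch via click's CliRunner; the module path of the main
group is not shown in this diff and is assumed here:

    from click.testing import CliRunner
    from cerbero_bite.cli import main  # assumed import path

    runner = CliRunner()
    result = runner.invoke(main, ["audit", "verify", "--file", "data/audit.log"])
    assert result.exit_code in (0, 2)  # 2 means the chain failed verification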

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
2026-04-27 13:35:35 +02:00
parent fbb7753cc6
commit 263470786d
25 changed files with 3669 additions and 14 deletions
+247 -10
@@ -10,19 +10,32 @@ without changing the surface.
from __future__ import annotations
import sys
from datetime import UTC, datetime
from pathlib import Path
import click
from rich.console import Console
from rich.table import Table
from cerbero_bite import __version__
from cerbero_bite.config.loader import compute_config_hash, load_strategy
from cerbero_bite.logging import configure as configure_logging
from cerbero_bite.logging import get_logger
from cerbero_bite.safety.audit_log import AuditChainError, AuditLog
from cerbero_bite.safety.audit_log import verify_chain as verify_audit_chain
from cerbero_bite.safety.kill_switch import KillSwitch
from cerbero_bite.state import Repository, run_migrations, transaction
from cerbero_bite.state import connect as connect_state
console = Console()
log = get_logger("cli")
_DEFAULT_DB_PATH = Path("data/state.sqlite")
_DEFAULT_AUDIT_PATH = Path("data/audit.log")
_DEFAULT_STRATEGY_PATH = Path("strategy.yaml")
def _phase0_notice(action: str) -> None:
console.print(f"[yellow]\\[phase 0 placeholder][/yellow] {action}")
@@ -85,18 +98,131 @@ def kill_switch() -> None:
"""Manage the engine kill switch."""
def _make_kill_switch(
db_path: Path, audit_path: Path, *, config_version: str
) -> KillSwitch:
"""Wire a :class:`KillSwitch` against the on-disk paths.
``init_system_state`` is called eagerly so the CLI can be used on
a fresh checkout before the engine has ever run.
"""
db_path.parent.mkdir(parents=True, exist_ok=True)
audit_path.parent.mkdir(parents=True, exist_ok=True)
conn = connect_state(db_path)
try:
run_migrations(conn)
repo = Repository()
with transaction(conn):
repo.init_system_state(
conn, config_version=config_version, now=datetime.now(UTC)
)
finally:
conn.close()
return KillSwitch(
connection_factory=lambda: connect_state(db_path),
repository=Repository(),
audit_log=AuditLog(audit_path),
)
@kill_switch.command(name="arm")
@click.option("--reason", required=True, help="Why you are arming the kill switch.")
def kill_switch_arm(reason: str) -> None:
@click.option(
"--source",
default="manual",
show_default=True,
help="Trigger label (manual, mcp_timeout, hash_chain, ...).",
)
@click.option(
"--db",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_DB_PATH,
show_default=True,
)
@click.option(
"--audit",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_AUDIT_PATH,
show_default=True,
)
@click.option(
"--config-version",
default="unknown",
show_default=True,
help="Recorded next to the kill event when the singleton is initialised.",
)
def kill_switch_arm(
reason: str, source: str, db: Path, audit: Path, config_version: str
) -> None:
"""Arm the kill switch (engine refuses new entries)."""
_phase0_notice(f"kill-switch arm placeholder (reason: {reason!r}).")
ks = _make_kill_switch(db, audit, config_version=config_version)
ks.arm(reason=reason, source=source)
console.print(f"[red]kill switch ARMED[/red] reason={reason!r} source={source}")
@kill_switch.command(name="disarm")
@click.option("--reason", required=True, help="Why you are disarming.")
def kill_switch_disarm(reason: str) -> None:
@click.option(
"--source",
default="manual",
show_default=True,
)
@click.option(
"--db",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_DB_PATH,
show_default=True,
)
@click.option(
"--audit",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_AUDIT_PATH,
show_default=True,
)
@click.option(
"--config-version",
default="unknown",
show_default=True,
)
def kill_switch_disarm(
reason: str, source: str, db: Path, audit: Path, config_version: str
) -> None:
"""Disarm the kill switch."""
_phase0_notice(f"kill-switch disarm placeholder (reason: {reason!r}).")
ks = _make_kill_switch(db, audit, config_version=config_version)
ks.disarm(reason=reason, source=source)
console.print(f"[green]kill switch DISARMED[/green] reason={reason!r}")
@kill_switch.command(name="status")
@click.option(
"--db",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_DB_PATH,
show_default=True,
)
def kill_switch_status(db: Path) -> None:
"""Print the current kill switch state."""
if not db.exists():
console.print("[yellow]state.sqlite not found — engine never ran[/yellow]")
return
conn = connect_state(db)
try:
run_migrations(conn)
state = Repository().get_system_state(conn)
finally:
conn.close()
if state is None:
console.print("[yellow]system_state singleton missing[/yellow]")
return
armed = state.kill_switch == 1
flag = "[red]ARMED[/red]" if armed else "[green]disarmed[/green]"
console.print(
f"kill_switch: {flag}\n"
f"reason: {state.kill_reason or '-'}\n"
f"kill_at: {state.kill_at.isoformat() if state.kill_at else '-'}\n"
f"last_health_check: {state.last_health_check.isoformat()}"
)
@main.command()
@@ -123,9 +249,42 @@ def config() -> None:
@config.command(name="hash")
def config_hash() -> None:
"""Compute and print SHA-256 of strategy.yaml."""
_phase0_notice("config hash placeholder; will read strategy.yaml and compute SHA-256.")
@click.option(
"--file",
"yaml_path",
type=click.Path(exists=True, dir_okay=False, path_type=Path),
default=_DEFAULT_STRATEGY_PATH,
show_default=True,
)
def config_hash(yaml_path: Path) -> None:
"""Compute and print the SHA-256 of *yaml_path* (config_hash field excluded)."""
text = yaml_path.read_text(encoding="utf-8")
digest = compute_config_hash(text)
console.print(digest)
@config.command(name="validate")
@click.option(
"--file",
"yaml_path",
type=click.Path(exists=True, dir_okay=False, path_type=Path),
default=_DEFAULT_STRATEGY_PATH,
show_default=True,
)
@click.option(
"--enforce-hash/--no-enforce-hash",
default=True,
show_default=True,
help="When enabled, the recorded config_hash must match the file body.",
)
def config_validate(yaml_path: Path, enforce_hash: bool) -> None:
"""Load and validate ``strategy.yaml`` (and any local override)."""
loaded = load_strategy(yaml_path, enforce_hash=enforce_hash)
console.print(
f"[green]ok[/green] version={loaded.config.config_version} "
f"hash={loaded.computed_hash[:16]}"
f"sources={', '.join(p.name for p in loaded.sources)}"
)
@main.group()
@@ -134,9 +293,87 @@ def audit() -> None:
@audit.command(name="verify")
def audit_verify() -> None:
"""Verify audit chain integrity."""
_phase0_notice("audit verify placeholder; will walk audit.log hash chain.")
@click.option(
"--file",
"audit_path",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_AUDIT_PATH,
show_default=True,
)
def audit_verify(audit_path: Path) -> None:
"""Walk the hash chain in *audit_path* and report tampering."""
try:
count = verify_audit_chain(audit_path)
except AuditChainError as exc:
console.print(f"[red]TAMPERED[/red]: {exc}")
sys.exit(2)
if count == 0:
console.print("[yellow]audit log empty[/yellow]")
else:
console.print(f"[green]ok[/green] {count} entries verified")
@main.group()
def state() -> None:
"""State inspection utilities."""
@state.command(name="inspect")
@click.option(
"--db",
type=click.Path(dir_okay=False, path_type=Path),
default=_DEFAULT_DB_PATH,
show_default=True,
)
def state_inspect(db: Path) -> None:
"""Print a short snapshot of the SQLite state file."""
if not db.exists():
console.print("[yellow]state.sqlite not found[/yellow]")
return
conn = connect_state(db)
try:
run_migrations(conn)
repo = Repository()
sys_state = repo.get_system_state(conn)
positions = repo.list_open_positions(conn)
concurrent = repo.count_concurrent_positions(conn)
finally:
conn.close()
if sys_state is None:
console.print("[yellow]system_state singleton missing[/yellow]")
return
armed = "[red]ARMED[/red]" if sys_state.kill_switch == 1 else "[green]disarmed[/green]"
console.print(
f"engine state: kill_switch={armed}, "
f"open positions: {concurrent}, "
f"config_version: {sys_state.config_version}"
)
if not positions:
console.print("no open positions")
return
table = Table(title="open positions")
table.add_column("proposal_id")
table.add_column("status")
table.add_column("spread")
table.add_column("short")
table.add_column("long")
table.add_column("n")
table.add_column("expiry")
for pos in positions:
table.add_row(
str(pos.proposal_id)[:8],
pos.status,
pos.spread_type,
str(pos.short_strike),
str(pos.long_strike),
str(pos.n_contracts),
pos.expiry.isoformat(),
)
console.print(table)
def _entrypoint() -> None:
+10
@@ -1,5 +1,11 @@
"""Strategy configuration: schema, loader, validation."""
from cerbero_bite.config.loader import (
ConfigHashError,
LoadedConfig,
compute_config_hash,
load_strategy,
)
from cerbero_bite.config.schema import (
AssetConfig,
DvolAdjustmentBand,
@@ -23,12 +29,14 @@ from cerbero_bite.config.schema import (
__all__ = [
"AssetConfig",
"ConfigHashError",
"DvolAdjustmentBand",
"EntryConfig",
"ExecutionConfig",
"ExitConfig",
"KellyConfig",
"LiquidityConfig",
"LoadedConfig",
"McpConfig",
"MonitoringConfig",
"ShortStrikeSpec",
@@ -39,5 +47,7 @@ __all__ = [
"StrategyConfig",
"StructureConfig",
"TelegramConfig",
"compute_config_hash",
"golden_config",
"load_strategy",
]
+141
@@ -0,0 +1,141 @@
"""YAML loader for ``strategy.yaml`` with optional local override.
* Reads ``strategy.yaml`` (golden config).
* If ``strategy.local.yaml`` exists alongside, deep-merges its keys on
top — that file is ``.gitignore``'d and used by Adriano for emergency
overrides.
* Verifies ``config_hash`` matches the SHA-256 of the YAML *minus* the
``config_hash`` line itself. A mismatch is reported via
:class:`ConfigHashError` and the orchestrator must arm the kill switch
per ``docs/07-risk-controls.md``.
The loader does *not* depend on the runtime: it returns a validated
:class:`StrategyConfig` plus the computed hash; nothing else.
"""
from __future__ import annotations
import hashlib
from dataclasses import dataclass
from pathlib import Path
from typing import Any
import yaml
from cerbero_bite.config.schema import StrategyConfig
__all__ = [
"ConfigHashError",
"LoadedConfig",
"compute_config_hash",
"load_strategy",
]
_HASH_KEY = "config_hash"
class ConfigHashError(RuntimeError):
"""Raised when the recorded ``config_hash`` does not match the file."""
@dataclass(frozen=True)
class LoadedConfig:
"""Result of :func:`load_strategy`."""
config: StrategyConfig
computed_hash: str
sources: tuple[Path, ...]
def _strip_hash_line(text: str) -> str:
"""Return *text* with the ``config_hash:`` line replaced by an empty string.
We deliberately keep the surrounding whitespace so that any other
line numbers stay stable; only the value of the hash is removed.
"""
out: list[str] = []
for line in text.splitlines(keepends=True):
stripped = line.lstrip()
if stripped.startswith(f"{_HASH_KEY}:"):
# keep the key but strip the value, so identical files with
# different hashes still hash the same way
indent = line[: len(line) - len(stripped)]
out.append(f"{indent}{_HASH_KEY}:\n")
else:
out.append(line)
return "".join(out)
def compute_config_hash(text: str) -> str:
"""SHA-256 of the YAML text after stripping the ``config_hash`` value."""
canonical = _strip_hash_line(text).encode("utf-8")
return hashlib.sha256(canonical).hexdigest()
def _deep_merge(base: dict[str, Any], override: dict[str, Any]) -> dict[str, Any]:
"""Return ``base`` with values from ``override`` recursively merged in."""
out = dict(base)
for key, value in override.items():
existing = out.get(key)
if isinstance(existing, dict) and isinstance(value, dict):
out[key] = _deep_merge(existing, value)
else:
out[key] = value
return out
def _load_yaml(path: Path) -> dict[str, Any]:
data = yaml.safe_load(path.read_text(encoding="utf-8"))
if data is None:
return {}
if not isinstance(data, dict):
raise ValueError(f"{path}: expected a top-level mapping")
return data
def load_strategy(
yaml_path: Path | str,
*,
local_override_path: Path | str | None = None,
enforce_hash: bool = True,
) -> LoadedConfig:
"""Load and validate a strategy YAML, optionally merging a local file.
Args:
yaml_path: path to ``strategy.yaml``.
local_override_path: when ``None`` (default), use
``<yaml_path>.local.yaml`` if present. Pass a path that does not
exist to disable the merge.
enforce_hash: when ``True``, raise :class:`ConfigHashError` if
the recorded hash does not match the file. Set to ``False``
in test fixtures or right after a manual edit.
"""
main_path = Path(yaml_path)
text = main_path.read_text(encoding="utf-8")
raw = _load_yaml(main_path)
sources: list[Path] = [main_path]
computed_hash = compute_config_hash(text)
declared_hash = raw.get(_HASH_KEY)
if enforce_hash and declared_hash != computed_hash:
raise ConfigHashError(
f"config_hash mismatch in {main_path}: "
f"declared={declared_hash}, computed={computed_hash}"
)
if local_override_path is None:
local_override_path = main_path.with_name(
main_path.stem + ".local" + main_path.suffix
)
override_path = Path(local_override_path)
if override_path.is_file():
override = _load_yaml(override_path)
raw = _deep_merge(raw, override)
sources.append(override_path)
return LoadedConfig(
config=StrategyConfig(**raw),
computed_hash=computed_hash,
sources=tuple(sources),
)
+19
@@ -0,0 +1,19 @@
"""Cross-cutting safety controls: kill switch, dead-man, audit chain."""
from cerbero_bite.safety.audit_log import (
GENESIS_HASH,
AuditChainError,
AuditEntry,
AuditLog,
iter_entries,
verify_chain,
)
__all__ = [
"GENESIS_HASH",
"AuditChainError",
"AuditEntry",
"AuditLog",
"iter_entries",
"verify_chain",
]
+246
@@ -0,0 +1,246 @@
"""Append-only hash-chained audit log (``docs/07-risk-controls.md``).
Every line is::
<iso-ts>|<event>|<json-payload>|prev_hash=<hex>|hash=<hex>
``hash`` is ``sha256("<iso-ts>|<event>|<json-payload>|<prev_hash>")`` so
that the integrity of the file can be re-verified by walking the chain
top-to-bottom. The first line uses ``prev_hash="0" * 64``.
The writer calls ``flush`` + ``os.fsync`` after every append; for the audit trail,
durability beats throughput.
"""
from __future__ import annotations
import hashlib
import json
import os
from collections.abc import Iterator
from dataclasses import dataclass
from datetime import UTC, datetime
from pathlib import Path
from typing import Any
__all__ = [
"GENESIS_HASH",
"AuditChainError",
"AuditEntry",
"AuditLog",
"verify_chain",
]
GENESIS_HASH = "0" * 64
_SEP = "|"
class AuditChainError(RuntimeError):
"""Raised when the audit chain fails verification."""
@dataclass(frozen=True)
class AuditEntry:
"""Parsed audit-log line."""
timestamp: datetime
event: str
payload: dict[str, Any]
prev_hash: str
hash: str
def _canonical_payload(payload: dict[str, Any]) -> str:
"""Serialize the payload deterministically (sorted keys, no whitespace)."""
return json.dumps(payload, sort_keys=True, separators=(",", ":"), default=str)
def _compute_hash(timestamp: str, event: str, payload_json: str, prev_hash: str) -> str:
raw = f"{timestamp}{_SEP}{event}{_SEP}{payload_json}{_SEP}{prev_hash}"
return hashlib.sha256(raw.encode("utf-8")).hexdigest()
def _format_line(
timestamp: str, event: str, payload_json: str, prev_hash: str, line_hash: str
) -> str:
return (
f"{timestamp}{_SEP}{event}{_SEP}{payload_json}{_SEP}"
f"prev_hash={prev_hash}{_SEP}hash={line_hash}\n"
)
def _parse_line(line: str) -> AuditEntry:
"""Parse a stored audit line back into :class:`AuditEntry`.
The payload may legitimately contain ``|`` characters inside JSON
strings; we therefore split from the right for the two trailing
fields and from the left for the timestamp + event + payload.
"""
if not line.endswith("\n"):
line = line + "\n"
body = line.rstrip("\n")
# Trailing parts.
try:
rest, hash_part = body.rsplit(_SEP, 1)
except ValueError as exc:
raise AuditChainError("missing hash= field") from exc
if not hash_part.startswith("hash="):
raise AuditChainError("missing hash= field")
line_hash = hash_part[len("hash=") :]
try:
rest, prev_part = rest.rsplit(_SEP, 1)
except ValueError as exc:
raise AuditChainError("missing prev_hash= field") from exc
if not prev_part.startswith("prev_hash="):
raise AuditChainError("missing prev_hash= field")
prev_hash = prev_part[len("prev_hash=") :]
# Leading parts.
parts = rest.split(_SEP, 2)
if len(parts) != 3:
raise AuditChainError("malformed leading section")
ts_str, event, payload_json = parts
try:
payload: dict[str, Any] = json.loads(payload_json)
except json.JSONDecodeError as exc:
raise AuditChainError("payload is not valid JSON") from exc
if not isinstance(payload, dict):
raise AuditChainError("payload must be a JSON object")
return AuditEntry(
timestamp=datetime.fromisoformat(ts_str),
event=event,
payload=payload,
prev_hash=prev_hash,
hash=line_hash,
)
def verify_chain(path: str | Path) -> int:
"""Re-walk *path* and raise :class:`AuditChainError` on tampering.
Returns the number of lines verified (0 if the file does not exist
or is empty).
"""
p = Path(path)
if not p.exists() or p.stat().st_size == 0:
return 0
expected_prev = GENESIS_HASH
count = 0
with p.open("r", encoding="utf-8") as fh:
for lineno, line in enumerate(fh, start=1):
if not line.strip():
continue
entry = _parse_line(line)
if entry.prev_hash != expected_prev:
raise AuditChainError(
f"line {lineno}: prev_hash mismatch "
f"(expected {expected_prev}, got {entry.prev_hash})"
)
recomputed = _compute_hash(
entry.timestamp.isoformat(),
entry.event,
_canonical_payload(entry.payload),
entry.prev_hash,
)
if recomputed != entry.hash:
raise AuditChainError(
f"line {lineno}: hash mismatch (expected {recomputed}, "
f"got {entry.hash})"
)
expected_prev = entry.hash
count += 1
return count
def iter_entries(path: str | Path) -> Iterator[AuditEntry]:
"""Yield each :class:`AuditEntry` from *path* without verifying."""
p = Path(path)
if not p.exists():
return
with p.open("r", encoding="utf-8") as fh:
for line in fh:
if line.strip():
yield _parse_line(line)
class AuditLog:
"""Writer for the hash-chained audit log.
A single instance per process is enough; concurrent writers are not
supported by design (the engine is the only writer). ``append`` is
fsync'd before returning.
"""
def __init__(self, path: str | Path) -> None:
self._path = Path(path)
self._path.parent.mkdir(parents=True, exist_ok=True)
self._last_hash: str = self._tail_hash() or GENESIS_HASH
@property
def path(self) -> Path: # pragma: no cover — accessor used by callers only
return self._path
@property
def last_hash(self) -> str:
return self._last_hash
def _tail_hash(self) -> str | None:
if not self._path.exists() or self._path.stat().st_size == 0:
return None
# Walk from EOF to find the last non-empty line. The chunked
# back-seek covers files larger than 4 KiB; the loop-exhausted
# branch is reached only when a partial / no-newline file is
# encountered (defensive — :func:`append` always writes "\n").
with self._path.open("rb") as fh:
fh.seek(0, os.SEEK_END)
size = fh.tell()
buf = b""
offset = size
chunk = 4096
while offset > 0: # pragma: no branch — terminates via break or offset==0
read = min(chunk, offset)
offset -= read
fh.seek(offset)
buf = fh.read(read) + buf
if b"\n" in buf:
break
text = buf.decode("utf-8", errors="strict")
for line in reversed(text.splitlines()): # pragma: no branch
if line.strip():
entry = _parse_line(line)
return entry.hash
return None # pragma: no cover — only hit when file is all blank lines
def append(
self,
*,
event: str,
payload: dict[str, Any] | None = None,
now: datetime | None = None,
) -> AuditEntry:
"""Append one event line and return the resulting entry."""
ts = (now or datetime.now(UTC)).astimezone(UTC)
ts_iso = ts.isoformat()
payload_json = _canonical_payload(payload or {})
prev_hash = self._last_hash
line_hash = _compute_hash(ts_iso, event, payload_json, prev_hash)
line = _format_line(ts_iso, event, payload_json, prev_hash, line_hash)
with self._path.open("a", encoding="utf-8") as fh:
fh.write(line)
fh.flush()
os.fsync(fh.fileno())
self._last_hash = line_hash
return AuditEntry(
timestamp=ts,
event=event,
payload=dict(payload or {}),
prev_hash=prev_hash,
hash=line_hash,
)
+120
@@ -0,0 +1,120 @@
"""Kill switch coordinator (``docs/07-risk-controls.md``).
Encapsulates the three side effects required when arming/disarming:
1. Update the ``system_state`` row in SQLite.
2. Append a tamper-evident line to the audit log.
3. Make the new state observable via :meth:`is_armed` for the
orchestrator.
The orchestrator is the only caller; the GUI signals the same intent
via the :class:`ManualAction` queue, which the orchestrator drains and
forwards here.
"""
from __future__ import annotations
import sqlite3
from collections.abc import Callable
from datetime import UTC, datetime
from cerbero_bite.safety.audit_log import AuditLog
from cerbero_bite.state import Repository, transaction
__all__ = ["KillSwitch", "KillSwitchError"]
class KillSwitchError(RuntimeError):
"""Raised when an arm/disarm transition is invalid."""
class KillSwitch:
"""Arm/disarm + status helper backed by ``system_state`` + audit log.
All transitions must go through this class so the SQLite row and
the audit chain stay in lock-step.
"""
def __init__(
self,
*,
connection_factory: Callable[[], sqlite3.Connection],
repository: Repository,
audit_log: AuditLog,
clock: Callable[[], datetime] | None = None,
) -> None:
self._connect = connection_factory
self._repo = repository
self._audit = audit_log
self._clock = clock or (lambda: datetime.now(UTC))
# ------------------------------------------------------------------
# Status
# ------------------------------------------------------------------
def is_armed(self) -> bool:
conn = self._connect()
try:
state = self._repo.get_system_state(conn)
finally:
conn.close()
if state is None:
return False
return state.kill_switch == 1
# ------------------------------------------------------------------
# Transitions
# ------------------------------------------------------------------
def arm(self, *, reason: str, source: str) -> None:
"""Move the engine to the armed state.
``source`` is a short label (``"manual"``, ``"mcp_timeout"``,
``"hash_chain"``) that goes both into the audit payload and the
log so we can attribute the trigger later.
"""
if not reason:
raise KillSwitchError("reason is required to arm the kill switch")
now = self._clock()
conn = self._connect()
try:
with transaction(conn):
state = self._repo.get_system_state(conn)
if state is None:
raise KillSwitchError(
"system_state singleton missing — call init_system_state first"
)
if state.kill_switch == 1:
return # idempotent
self._repo.set_kill_switch(conn, armed=True, reason=reason, now=now)
finally:
conn.close()
self._audit.append(
event="KILL_SWITCH_ARMED",
payload={"reason": reason, "source": source},
now=now,
)
def disarm(self, *, reason: str, source: str) -> None:
"""Move the engine back to the disarmed state."""
if not reason:
raise KillSwitchError("reason is required to disarm the kill switch")
now = self._clock()
conn = self._connect()
try:
with transaction(conn):
state = self._repo.get_system_state(conn)
if state is None:
raise KillSwitchError(
"system_state singleton missing — call init_system_state first"
)
if state.kill_switch == 0:
return # idempotent
self._repo.set_kill_switch(conn, armed=False, reason=None, now=now)
finally:
conn.close()
self._audit.append(
event="KILL_SWITCH_DISARMED",
payload={"reason": reason, "source": source},
now=now,
)
+27
@@ -0,0 +1,27 @@
"""Persistent state: SQLite schema, migrations, typed repository."""
from cerbero_bite.state.db import connect, run_migrations, transaction
from cerbero_bite.state.models import (
DecisionRecord,
DvolSnapshot,
InstructionRecord,
ManualAction,
PositionRecord,
PositionStatus,
SystemStateRecord,
)
from cerbero_bite.state.repository import Repository
__all__ = [
"DecisionRecord",
"DvolSnapshot",
"InstructionRecord",
"ManualAction",
"PositionRecord",
"PositionStatus",
"Repository",
"SystemStateRecord",
"connect",
"run_migrations",
"transaction",
]
+102
@@ -0,0 +1,102 @@
"""SQLite connection helpers and forward-only migrations.
Connections use ``sqlite3`` with a few pragmas tuned for our workload:
* ``foreign_keys=ON`` — referential integrity for ``instructions`` →
``positions``.
* ``journal_mode=WAL`` — concurrent reader (the GUI) without blocking
the engine writer.
* ``synchronous=NORMAL`` — durable enough for our use-case (we also
append-fsync the audit chain) and noticeably faster than FULL.
Migrations live in ``state/migrations/NNNN_<name>.sql``. They are
applied in numeric order; each bumps ``PRAGMA user_version`` in its
final statement. ``run_migrations`` is idempotent: it
re-checks ``user_version`` and replays only what is missing.
"""
from __future__ import annotations
import re
import sqlite3
from collections.abc import Iterator
from contextlib import contextmanager
from importlib import resources
from pathlib import Path
__all__ = ["connect", "current_version", "list_migrations", "run_migrations"]
_MIGRATION_PATTERN = re.compile(r"^(\d{4})_[a-z0-9_]+\.sql$")
def _apply_pragmas(conn: sqlite3.Connection) -> None:
conn.execute("PRAGMA foreign_keys = ON")
conn.execute("PRAGMA journal_mode = WAL")
conn.execute("PRAGMA synchronous = NORMAL")
def connect(path: str | Path) -> sqlite3.Connection:
"""Open a connection to ``path`` with the standard pragmas applied."""
db_path = Path(path)
db_path.parent.mkdir(parents=True, exist_ok=True)
conn = sqlite3.connect(
str(db_path),
isolation_level=None, # autocommit; we manage transactions explicitly
detect_types=sqlite3.PARSE_DECLTYPES,
)
conn.row_factory = sqlite3.Row
_apply_pragmas(conn)
return conn
@contextmanager
def transaction(conn: sqlite3.Connection) -> Iterator[sqlite3.Connection]:
"""Begin/commit/rollback wrapper that respects autocommit mode."""
conn.execute("BEGIN")
try:
yield conn
except Exception:
conn.execute("ROLLBACK")
raise
else:
conn.execute("COMMIT")
def current_version(conn: sqlite3.Connection) -> int:
row = conn.execute("PRAGMA user_version").fetchone()
return int(row[0])
def list_migrations() -> list[tuple[int, str, str]]:
"""Return ``[(version, filename, sql_text), ...]`` sorted by version."""
pkg_files = resources.files("cerbero_bite.state.migrations")
out: list[tuple[int, str, str]] = []
for entry in pkg_files.iterdir():
match = _MIGRATION_PATTERN.match(entry.name)
if not match:
continue
version = int(match.group(1))
out.append((version, entry.name, entry.read_text(encoding="utf-8")))
out.sort(key=lambda triple: triple[0])
return out
def run_migrations(conn: sqlite3.Connection) -> int:
"""Apply migrations whose version is greater than the current one.
Returns the new ``user_version`` after the run.
"""
applied_to = current_version(conn)
for version, name, sql in list_migrations():
if version <= applied_to:
continue
conn.executescript(sql)
new_version = current_version(conn)
if new_version != version:
raise RuntimeError(
f"Migration {name} did not bump user_version to {version} "
f"(observed {new_version})"
)
applied_to = new_version
return applied_to
@@ -0,0 +1,101 @@
-- 0001_init.sql — initial schema for Cerbero Bite (docs/05-data-model.md)
--
-- Forward-only. Run by state/db.py once user_version == 0.
-- Bumps user_version to 1 at the end.
PRAGMA foreign_keys = ON;
CREATE TABLE positions (
proposal_id TEXT PRIMARY KEY,
spread_type TEXT NOT NULL,
asset TEXT NOT NULL DEFAULT 'ETH',
expiry TEXT NOT NULL,
short_strike NUMERIC NOT NULL,
long_strike NUMERIC NOT NULL,
short_instrument TEXT NOT NULL,
long_instrument TEXT NOT NULL,
n_contracts INTEGER NOT NULL,
spread_width_usd NUMERIC NOT NULL,
spread_width_pct NUMERIC NOT NULL,
credit_eth NUMERIC NOT NULL,
credit_usd NUMERIC NOT NULL,
max_loss_usd NUMERIC NOT NULL,
spot_at_entry NUMERIC NOT NULL,
dvol_at_entry NUMERIC NOT NULL,
delta_at_entry NUMERIC NOT NULL,
eth_price_at_entry NUMERIC NOT NULL,
proposed_at TEXT NOT NULL,
opened_at TEXT,
closed_at TEXT,
close_reason TEXT,
debit_paid_eth NUMERIC,
pnl_eth NUMERIC,
pnl_usd NUMERIC,
status TEXT NOT NULL,
created_at TEXT NOT NULL,
updated_at TEXT NOT NULL
);
CREATE INDEX idx_positions_status ON positions(status);
CREATE INDEX idx_positions_closed_at ON positions(closed_at);
CREATE TABLE instructions (
instruction_id TEXT PRIMARY KEY,
proposal_id TEXT NOT NULL REFERENCES positions(proposal_id),
kind TEXT NOT NULL,
payload_json TEXT NOT NULL,
sent_at TEXT NOT NULL,
acknowledged_at TEXT,
filled_at TEXT,
cancelled_at TEXT,
actual_fill_eth NUMERIC,
actual_fees_eth NUMERIC
);
CREATE INDEX idx_instructions_proposal ON instructions(proposal_id);
CREATE TABLE decisions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
decision_type TEXT NOT NULL,
proposal_id TEXT,
timestamp TEXT NOT NULL,
inputs_json TEXT NOT NULL,
outputs_json TEXT NOT NULL,
action_taken TEXT,
notes TEXT
);
CREATE INDEX idx_decisions_timestamp ON decisions(timestamp);
CREATE INDEX idx_decisions_proposal ON decisions(proposal_id);
CREATE TABLE dvol_history (
timestamp TEXT PRIMARY KEY,
dvol NUMERIC NOT NULL,
eth_spot NUMERIC NOT NULL
);
CREATE TABLE manual_actions (
id INTEGER PRIMARY KEY AUTOINCREMENT,
kind TEXT NOT NULL,
proposal_id TEXT,
payload_json TEXT,
created_at TEXT NOT NULL,
consumed_at TEXT,
consumed_by TEXT,
result TEXT
);
CREATE INDEX idx_manual_actions_unconsumed ON manual_actions(consumed_at);
CREATE TABLE system_state (
id INTEGER PRIMARY KEY CHECK (id = 1),
kill_switch INTEGER NOT NULL DEFAULT 0,
kill_reason TEXT,
kill_at TEXT,
last_health_check TEXT NOT NULL,
last_kelly_calib TEXT,
config_version TEXT NOT NULL,
started_at TEXT NOT NULL
);
PRAGMA user_version = 1;
@@ -0,0 +1 @@
"""Forward-only SQL migrations for the Cerbero Bite state database."""
+154
@@ -0,0 +1,154 @@
"""Pydantic record types mirroring the SQLite tables.
Every numeric column documented as ``NUMERIC`` in
``state/migrations/0001_init.sql`` is exposed as :class:`decimal.Decimal`
on the Python side. The repository layer is responsible for serialising
to ``TEXT`` (using ``str``) when writing and parsing back when reading,
so precision is never lost via ``float`` coercion.
"""
from __future__ import annotations
from datetime import datetime
from decimal import Decimal
from typing import Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
__all__ = [
"DecisionRecord",
"DvolSnapshot",
"InstructionRecord",
"ManualAction",
"PositionRecord",
"PositionStatus",
"SystemStateRecord",
]
PositionStatus = Literal[
"proposed",
"awaiting_fill",
"open",
"closing",
"closed",
"cancelled",
]
class PositionRecord(BaseModel):
"""Row of the ``positions`` table."""
model_config = ConfigDict(extra="forbid")
proposal_id: UUID
spread_type: str
asset: str = "ETH"
expiry: datetime
short_strike: Decimal
long_strike: Decimal
short_instrument: str
long_instrument: str
n_contracts: int
spread_width_usd: Decimal
spread_width_pct: Decimal
credit_eth: Decimal
credit_usd: Decimal
max_loss_usd: Decimal
spot_at_entry: Decimal
dvol_at_entry: Decimal
delta_at_entry: Decimal
eth_price_at_entry: Decimal
proposed_at: datetime
opened_at: datetime | None = None
closed_at: datetime | None = None
close_reason: str | None = None
debit_paid_eth: Decimal | None = None
pnl_eth: Decimal | None = None
pnl_usd: Decimal | None = None
status: PositionStatus
created_at: datetime
updated_at: datetime
class InstructionRecord(BaseModel):
"""Row of the ``instructions`` table."""
model_config = ConfigDict(extra="forbid")
instruction_id: UUID
proposal_id: UUID
kind: Literal["open_combo", "close_combo"]
payload_json: str
sent_at: datetime
acknowledged_at: datetime | None = None
filled_at: datetime | None = None
cancelled_at: datetime | None = None
actual_fill_eth: Decimal | None = None
actual_fees_eth: Decimal | None = None
class DecisionRecord(BaseModel):
"""Row of the ``decisions`` table.
``id`` is :class:`int` and may be ``None`` before the row has been
inserted; the repository sets it after the auto-increment fires.
"""
model_config = ConfigDict(extra="forbid")
id: int | None = None
decision_type: Literal["entry_check", "exit_check", "kelly_recalib"]
proposal_id: UUID | None = None
timestamp: datetime
inputs_json: str
outputs_json: str
action_taken: str | None = None
notes: str | None = None
class DvolSnapshot(BaseModel):
"""Row of the ``dvol_history`` table."""
model_config = ConfigDict(extra="forbid")
timestamp: datetime
dvol: Decimal
eth_spot: Decimal
class ManualAction(BaseModel):
"""Row of the ``manual_actions`` table."""
model_config = ConfigDict(extra="forbid")
id: int | None = None
kind: Literal[
"approve_proposal",
"reject_proposal",
"force_close",
"arm_kill",
"disarm_kill",
]
proposal_id: UUID | None = None
payload_json: str | None = None
created_at: datetime
consumed_at: datetime | None = None
consumed_by: str | None = None
result: str | None = None
class SystemStateRecord(BaseModel):
"""Singleton row of the ``system_state`` table."""
model_config = ConfigDict(extra="forbid")
id: int = Field(default=1)
kill_switch: int = 0
kill_reason: str | None = None
kill_at: datetime | None = None
last_health_check: datetime
last_kelly_calib: datetime | None = None
config_version: str
started_at: datetime
+553
@@ -0,0 +1,553 @@
"""Typed CRUD layer over the SQLite database.
All methods take a :class:`sqlite3.Connection` so callers can compose a
single transaction across multiple writes (the orchestrator does this
when persisting an entry decision + the resulting proposal). The
repository never opens its own connection: that responsibility is left
to :func:`cerbero_bite.state.db.connect`.
Decimals are stored as TEXT to preserve precision (see
``state/models.py``).
"""
from __future__ import annotations
import sqlite3
from datetime import UTC, datetime
from decimal import Decimal
from typing import Any
from uuid import UUID
from cerbero_bite.state.models import (
DecisionRecord,
DvolSnapshot,
InstructionRecord,
ManualAction,
PositionRecord,
PositionStatus,
SystemStateRecord,
)
__all__ = ["Repository"]
# ---------------------------------------------------------------------------
# Encoding helpers
# ---------------------------------------------------------------------------
def _enc_dec(value: Decimal | None) -> str | None:
return None if value is None else str(value)
def _enc_dt(value: datetime | None) -> str | None:
if value is None:
return None
if value.tzinfo is None:
value = value.replace(tzinfo=UTC)
return value.astimezone(UTC).isoformat()
def _enc_uuid(value: UUID | None) -> str | None:
return None if value is None else str(value)
def _dec_dec(value: Any) -> Decimal | None:
if value is None:
return None
return Decimal(str(value))
def _dec_dt(value: Any) -> datetime | None:
if value is None:
return None
return datetime.fromisoformat(str(value))
def _dec_uuid(value: Any) -> UUID | None:
if value is None:
return None
return UUID(str(value))
# ---------------------------------------------------------------------------
# Repository
# ---------------------------------------------------------------------------
class Repository:
"""Typed CRUD wrapper. One instance per process, all calls take a conn."""
# ------------------------------------------------------------------
# positions
# ------------------------------------------------------------------
def create_position(self, conn: sqlite3.Connection, record: PositionRecord) -> None:
conn.execute(
"""
INSERT INTO positions (
proposal_id, spread_type, asset, expiry,
short_strike, long_strike, short_instrument, long_instrument,
n_contracts, spread_width_usd, spread_width_pct,
credit_eth, credit_usd, max_loss_usd,
spot_at_entry, dvol_at_entry, delta_at_entry, eth_price_at_entry,
proposed_at, opened_at, closed_at, close_reason,
debit_paid_eth, pnl_eth, pnl_usd,
status, created_at, updated_at
) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
""",
(
_enc_uuid(record.proposal_id),
record.spread_type,
record.asset,
_enc_dt(record.expiry),
_enc_dec(record.short_strike),
_enc_dec(record.long_strike),
record.short_instrument,
record.long_instrument,
record.n_contracts,
_enc_dec(record.spread_width_usd),
_enc_dec(record.spread_width_pct),
_enc_dec(record.credit_eth),
_enc_dec(record.credit_usd),
_enc_dec(record.max_loss_usd),
_enc_dec(record.spot_at_entry),
_enc_dec(record.dvol_at_entry),
_enc_dec(record.delta_at_entry),
_enc_dec(record.eth_price_at_entry),
_enc_dt(record.proposed_at),
_enc_dt(record.opened_at),
_enc_dt(record.closed_at),
record.close_reason,
_enc_dec(record.debit_paid_eth),
_enc_dec(record.pnl_eth),
_enc_dec(record.pnl_usd),
record.status,
_enc_dt(record.created_at),
_enc_dt(record.updated_at),
),
)
def get_position(
self, conn: sqlite3.Connection, proposal_id: UUID
) -> PositionRecord | None:
row = conn.execute(
"SELECT * FROM positions WHERE proposal_id = ?",
(_enc_uuid(proposal_id),),
).fetchone()
return None if row is None else _row_to_position(row)
def list_positions(
self,
conn: sqlite3.Connection,
*,
status: PositionStatus | None = None,
) -> list[PositionRecord]:
if status is None:
cursor = conn.execute(
"SELECT * FROM positions ORDER BY created_at DESC"
)
else:
cursor = conn.execute(
"SELECT * FROM positions WHERE status = ? ORDER BY created_at DESC",
(status,),
)
return [_row_to_position(r) for r in cursor.fetchall()]
def list_open_positions(self, conn: sqlite3.Connection) -> list[PositionRecord]:
rows = conn.execute(
"SELECT * FROM positions WHERE status IN ('open', 'awaiting_fill', "
"'closing') ORDER BY created_at DESC"
).fetchall()
return [_row_to_position(r) for r in rows]
def update_position_status(
self,
conn: sqlite3.Connection,
proposal_id: UUID,
*,
status: PositionStatus,
opened_at: datetime | None = None,
closed_at: datetime | None = None,
close_reason: str | None = None,
debit_paid_eth: Decimal | None = None,
pnl_eth: Decimal | None = None,
pnl_usd: Decimal | None = None,
now: datetime,
) -> None:
sets: list[str] = ["status = ?", "updated_at = ?"]
params: list[Any] = [status, _enc_dt(now)]
if opened_at is not None:
sets.append("opened_at = ?")
params.append(_enc_dt(opened_at))
if closed_at is not None:
sets.append("closed_at = ?")
params.append(_enc_dt(closed_at))
if close_reason is not None:
sets.append("close_reason = ?")
params.append(close_reason)
if debit_paid_eth is not None:
sets.append("debit_paid_eth = ?")
params.append(_enc_dec(debit_paid_eth))
if pnl_eth is not None:
sets.append("pnl_eth = ?")
params.append(_enc_dec(pnl_eth))
if pnl_usd is not None:
sets.append("pnl_usd = ?")
params.append(_enc_dec(pnl_usd))
params.append(_enc_uuid(proposal_id))
conn.execute(
f"UPDATE positions SET {', '.join(sets)} WHERE proposal_id = ?",
params,
)
def count_concurrent_positions(self, conn: sqlite3.Connection) -> int:
row = conn.execute(
"SELECT COUNT(*) FROM positions WHERE status IN "
"('awaiting_fill', 'open', 'closing')"
).fetchone()
return int(row[0])
# ------------------------------------------------------------------
# instructions
# ------------------------------------------------------------------
def create_instruction(
self, conn: sqlite3.Connection, record: InstructionRecord
) -> None:
conn.execute(
"""
INSERT INTO instructions (
instruction_id, proposal_id, kind, payload_json,
sent_at, acknowledged_at, filled_at, cancelled_at,
actual_fill_eth, actual_fees_eth
) VALUES (?,?,?,?,?,?,?,?,?,?)
""",
(
_enc_uuid(record.instruction_id),
_enc_uuid(record.proposal_id),
record.kind,
record.payload_json,
_enc_dt(record.sent_at),
_enc_dt(record.acknowledged_at),
_enc_dt(record.filled_at),
_enc_dt(record.cancelled_at),
_enc_dec(record.actual_fill_eth),
_enc_dec(record.actual_fees_eth),
),
)
def update_instruction(
self,
conn: sqlite3.Connection,
instruction_id: UUID,
*,
acknowledged_at: datetime | None = None,
filled_at: datetime | None = None,
cancelled_at: datetime | None = None,
actual_fill_eth: Decimal | None = None,
actual_fees_eth: Decimal | None = None,
) -> None:
sets: list[str] = []
params: list[Any] = []
if acknowledged_at is not None:
sets.append("acknowledged_at = ?")
params.append(_enc_dt(acknowledged_at))
if filled_at is not None:
sets.append("filled_at = ?")
params.append(_enc_dt(filled_at))
if cancelled_at is not None:
sets.append("cancelled_at = ?")
params.append(_enc_dt(cancelled_at))
if actual_fill_eth is not None:
sets.append("actual_fill_eth = ?")
params.append(_enc_dec(actual_fill_eth))
if actual_fees_eth is not None:
sets.append("actual_fees_eth = ?")
params.append(_enc_dec(actual_fees_eth))
if not sets:
return
params.append(_enc_uuid(instruction_id))
conn.execute(
f"UPDATE instructions SET {', '.join(sets)} "
f"WHERE instruction_id = ?",
params,
)
def list_instructions(
self, conn: sqlite3.Connection, proposal_id: UUID
) -> list[InstructionRecord]:
rows = conn.execute(
"SELECT * FROM instructions WHERE proposal_id = ? ORDER BY sent_at",
(_enc_uuid(proposal_id),),
).fetchall()
return [_row_to_instruction(r) for r in rows]
# ------------------------------------------------------------------
# decisions
# ------------------------------------------------------------------
def record_decision(
self, conn: sqlite3.Connection, record: DecisionRecord
) -> int:
cursor = conn.execute(
"""
INSERT INTO decisions (
decision_type, proposal_id, timestamp,
inputs_json, outputs_json, action_taken, notes
) VALUES (?,?,?,?,?,?,?)
""",
(
record.decision_type,
_enc_uuid(record.proposal_id),
_enc_dt(record.timestamp),
record.inputs_json,
record.outputs_json,
record.action_taken,
record.notes,
),
)
return int(cursor.lastrowid or 0)
def list_decisions(
self,
conn: sqlite3.Connection,
*,
proposal_id: UUID | None = None,
limit: int = 100,
) -> list[DecisionRecord]:
if proposal_id is None:
rows = conn.execute(
"SELECT * FROM decisions ORDER BY timestamp DESC LIMIT ?",
(limit,),
).fetchall()
else:
rows = conn.execute(
"SELECT * FROM decisions WHERE proposal_id = ? "
"ORDER BY timestamp DESC LIMIT ?",
(_enc_uuid(proposal_id), limit),
).fetchall()
return [_row_to_decision(r) for r in rows]
# ------------------------------------------------------------------
# dvol_history
# ------------------------------------------------------------------
def record_dvol_snapshot(
self, conn: sqlite3.Connection, snapshot: DvolSnapshot
) -> None:
conn.execute(
"INSERT OR REPLACE INTO dvol_history(timestamp, dvol, eth_spot) "
"VALUES (?,?,?)",
(
_enc_dt(snapshot.timestamp),
_enc_dec(snapshot.dvol),
_enc_dec(snapshot.eth_spot),
),
)
# ------------------------------------------------------------------
# manual_actions
# ------------------------------------------------------------------
def enqueue_manual_action(
self, conn: sqlite3.Connection, action: ManualAction
) -> int:
cursor = conn.execute(
"INSERT INTO manual_actions(kind, proposal_id, payload_json, created_at) "
"VALUES (?,?,?,?)",
(
action.kind,
_enc_uuid(action.proposal_id),
action.payload_json,
_enc_dt(action.created_at),
),
)
return int(cursor.lastrowid or 0)
def next_unconsumed_action(
self, conn: sqlite3.Connection
) -> ManualAction | None:
row = conn.execute(
"SELECT * FROM manual_actions WHERE consumed_at IS NULL "
"ORDER BY created_at ASC LIMIT 1"
).fetchone()
return None if row is None else _row_to_manual(row)
def mark_action_consumed(
self,
conn: sqlite3.Connection,
action_id: int,
*,
consumed_by: str,
result: str,
now: datetime,
) -> None:
conn.execute(
"UPDATE manual_actions SET consumed_at = ?, consumed_by = ?, "
"result = ? WHERE id = ?",
(_enc_dt(now), consumed_by, result, action_id),
)
# ------------------------------------------------------------------
# system_state
# ------------------------------------------------------------------
def init_system_state(
self, conn: sqlite3.Connection, *, config_version: str, now: datetime
) -> None:
"""Insert the singleton row if it does not already exist."""
existing = conn.execute(
"SELECT 1 FROM system_state WHERE id = 1"
).fetchone()
if existing is not None:
return
conn.execute(
"INSERT INTO system_state(id, kill_switch, last_health_check, "
"config_version, started_at) VALUES (1, 0, ?, ?, ?)",
(_enc_dt(now), config_version, _enc_dt(now)),
)
def get_system_state(
self, conn: sqlite3.Connection
) -> SystemStateRecord | None:
row = conn.execute("SELECT * FROM system_state WHERE id = 1").fetchone()
if row is None:
return None
return SystemStateRecord(
id=int(row["id"]),
kill_switch=int(row["kill_switch"]),
kill_reason=row["kill_reason"],
kill_at=_dec_dt(row["kill_at"]),
last_health_check=_dec_dt_required(row["last_health_check"]),
last_kelly_calib=_dec_dt(row["last_kelly_calib"]),
config_version=row["config_version"],
started_at=_dec_dt_required(row["started_at"]),
)
def set_kill_switch(
self,
conn: sqlite3.Connection,
*,
armed: bool,
reason: str | None,
now: datetime,
) -> None:
conn.execute(
"UPDATE system_state SET kill_switch = ?, kill_reason = ?, "
"kill_at = ?, last_health_check = ? WHERE id = 1",
(
1 if armed else 0,
reason,
_enc_dt(now) if armed else None,
_enc_dt(now),
),
)
def touch_health_check(
self, conn: sqlite3.Connection, *, now: datetime
) -> None:
conn.execute(
"UPDATE system_state SET last_health_check = ? WHERE id = 1",
(_enc_dt(now),),
)
# ---------------------------------------------------------------------------
# Row → model converters
# ---------------------------------------------------------------------------
def _dec_dt_required(value: Any) -> datetime:
out = _dec_dt(value)
if out is None:
raise ValueError("expected non-null datetime in row")
return out
def _row_to_position(row: sqlite3.Row) -> PositionRecord:
proposal_id = _dec_uuid(row["proposal_id"])
if proposal_id is None:
raise ValueError("positions.proposal_id was NULL")
return PositionRecord(
proposal_id=proposal_id,
spread_type=row["spread_type"],
asset=row["asset"],
expiry=_dec_dt_required(row["expiry"]),
short_strike=_dec_dec_required(row["short_strike"]),
long_strike=_dec_dec_required(row["long_strike"]),
short_instrument=row["short_instrument"],
long_instrument=row["long_instrument"],
n_contracts=int(row["n_contracts"]),
spread_width_usd=_dec_dec_required(row["spread_width_usd"]),
spread_width_pct=_dec_dec_required(row["spread_width_pct"]),
credit_eth=_dec_dec_required(row["credit_eth"]),
credit_usd=_dec_dec_required(row["credit_usd"]),
max_loss_usd=_dec_dec_required(row["max_loss_usd"]),
spot_at_entry=_dec_dec_required(row["spot_at_entry"]),
dvol_at_entry=_dec_dec_required(row["dvol_at_entry"]),
delta_at_entry=_dec_dec_required(row["delta_at_entry"]),
eth_price_at_entry=_dec_dec_required(row["eth_price_at_entry"]),
proposed_at=_dec_dt_required(row["proposed_at"]),
opened_at=_dec_dt(row["opened_at"]),
closed_at=_dec_dt(row["closed_at"]),
close_reason=row["close_reason"],
debit_paid_eth=_dec_dec(row["debit_paid_eth"]),
pnl_eth=_dec_dec(row["pnl_eth"]),
pnl_usd=_dec_dec(row["pnl_usd"]),
status=row["status"],
created_at=_dec_dt_required(row["created_at"]),
updated_at=_dec_dt_required(row["updated_at"]),
)
def _row_to_instruction(row: sqlite3.Row) -> InstructionRecord:
instruction_id = _dec_uuid(row["instruction_id"])
proposal_id = _dec_uuid(row["proposal_id"])
if instruction_id is None or proposal_id is None:
raise ValueError("instructions row missing required UUID")
return InstructionRecord(
instruction_id=instruction_id,
proposal_id=proposal_id,
kind=row["kind"],
payload_json=row["payload_json"],
sent_at=_dec_dt_required(row["sent_at"]),
acknowledged_at=_dec_dt(row["acknowledged_at"]),
filled_at=_dec_dt(row["filled_at"]),
cancelled_at=_dec_dt(row["cancelled_at"]),
actual_fill_eth=_dec_dec(row["actual_fill_eth"]),
actual_fees_eth=_dec_dec(row["actual_fees_eth"]),
)
def _row_to_decision(row: sqlite3.Row) -> DecisionRecord:
return DecisionRecord(
id=int(row["id"]),
decision_type=row["decision_type"],
proposal_id=_dec_uuid(row["proposal_id"]),
timestamp=_dec_dt_required(row["timestamp"]),
inputs_json=row["inputs_json"],
outputs_json=row["outputs_json"],
action_taken=row["action_taken"],
notes=row["notes"],
)
def _row_to_manual(row: sqlite3.Row) -> ManualAction:
return ManualAction(
id=int(row["id"]),
kind=row["kind"],
proposal_id=_dec_uuid(row["proposal_id"]),
payload_json=row["payload_json"],
created_at=_dec_dt_required(row["created_at"]),
consumed_at=_dec_dt(row["consumed_at"]),
consumed_by=row["consumed_by"],
result=row["result"],
)
def _dec_dec_required(value: Any) -> Decimal:
out = _dec_dec(value)
if out is None:
raise ValueError("expected non-null Decimal in row")
return out