Phase 2: persistence + safety controls

Aggiunge la persistenza SQLite, l'audit log a hash chain, il kill
switch coordinato e i CLI di gestione documentati in
docs/05-data-model.md e docs/07-risk-controls.md. 197 test pass,
1 skipped (sqlite3 CLI mancante), copertura totale 97%.

State (`state/`):
- 0001_init.sql con positions, instructions, decisions, dvol_history,
  manual_actions, system_state.
- db.py: connect con WAL + foreign_keys + transaction ctx, runner
  forward-only basato su PRAGMA user_version.
- models.py: record Pydantic, Decimal preservato come TEXT.
- repository.py: CRUD typed con singola connessione passata, cache
  aware, posizioni concorrenti.

Safety (`safety/`):
- audit_log.py: AuditLog append-only con SHA-256 chain e fsync,
  verify_chain riconosce ogni manomissione (payload, prev_hash,
  hash, JSON, separatori).
- kill_switch.py: arm/disarm transazionali, idempotenti, accoppiati
  all'audit chain.

Config (`config/loader.py` + `strategy.yaml`):
- Loader YAML con deep-merge di strategy.local.yaml.
- Verifica config_hash SHA-256 (riga config_hash esclusa).
- File golden strategy.yaml + esempio override.

Scripts:
- dead_man.sh: watchdog shell indipendente da Python.
- backup.py: VACUUM INTO orario con retention 30 giorni.

CLI:
- audit verify (exit 2 su tampering).
- kill-switch arm/disarm/status su SQLite reale.
- state inspect con tabella posizioni aperte.
- config hash, config validate.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-04-27 13:35:35 +02:00
parent fbb7753cc6
commit 263470786d
25 changed files with 3669 additions and 14 deletions
+27
View File
@@ -0,0 +1,27 @@
"""Persistent state: SQLite schema, migrations, typed repository."""
from cerbero_bite.state.db import connect, run_migrations, transaction
from cerbero_bite.state.models import (
DecisionRecord,
DvolSnapshot,
InstructionRecord,
ManualAction,
PositionRecord,
PositionStatus,
SystemStateRecord,
)
from cerbero_bite.state.repository import Repository
__all__ = [
"DecisionRecord",
"DvolSnapshot",
"InstructionRecord",
"ManualAction",
"PositionRecord",
"PositionStatus",
"Repository",
"SystemStateRecord",
"connect",
"run_migrations",
"transaction",
]
+102
View File
@@ -0,0 +1,102 @@
"""SQLite connection helpers and forward-only migrations.
Connections use ``sqlite3`` with a few pragmas tuned for our workload:
* ``foreign_keys=ON`` — referential integrity for ``instructions`` →
``positions``.
* ``journal_mode=WAL`` — concurrent reader (the GUI) without blocking
the engine writer.
* ``synchronous=NORMAL`` — durable enough for our use-case (we also
append-fsync the audit chain) and noticeably faster than FULL.
Migrations live in ``state/migrations/NNNN_<name>.sql``. They are
applied in numeric order, each bumps ``PRAGMA user_version`` at the
last statement of the file. ``run_migrations`` is idempotent: it
re-checks ``user_version`` and replays only what is missing.
"""
from __future__ import annotations
import re
import sqlite3
from collections.abc import Iterator
from contextlib import contextmanager
from importlib import resources
from pathlib import Path
__all__ = ["connect", "current_version", "list_migrations", "run_migrations"]
_MIGRATION_PATTERN = re.compile(r"^(\d{4})_[a-z0-9_]+\.sql$")
def _apply_pragmas(conn: sqlite3.Connection) -> None:
    """Set the standard per-connection pragmas (see the module docstring)."""
    for pragma in (
        "foreign_keys = ON",
        "journal_mode = WAL",
        "synchronous = NORMAL",
    ):
        conn.execute(f"PRAGMA {pragma}")
def connect(path: str | Path) -> sqlite3.Connection:
    """Open a connection to ``path`` with the standard pragmas applied.

    The parent directory is created on demand, the connection runs in
    autocommit mode (transactions are managed explicitly), and rows come
    back as :class:`sqlite3.Row` for name-based access.
    """
    target = Path(path)
    target.parent.mkdir(parents=True, exist_ok=True)
    connection = sqlite3.connect(
        str(target),
        isolation_level=None,  # autocommit; we manage transactions explicitly
        detect_types=sqlite3.PARSE_DECLTYPES,
    )
    connection.row_factory = sqlite3.Row
    _apply_pragmas(connection)
    return connection
@contextmanager
def transaction(conn: sqlite3.Connection) -> Iterator[sqlite3.Connection]:
    """Yield ``conn`` inside an explicit BEGIN, committing on success.

    Any exception rolls the transaction back and is re-raised; this is
    how transactional writes work against an autocommit connection.
    """
    conn.execute("BEGIN")
    try:
        yield conn
    except Exception:
        conn.execute("ROLLBACK")
        raise
    conn.execute("COMMIT")
def current_version(conn: sqlite3.Connection) -> int:
    """Return the database's ``PRAGMA user_version`` as an int."""
    (version,) = conn.execute("PRAGMA user_version").fetchone()
    return int(version)
def list_migrations() -> list[tuple[int, str, str]]:
    """Return ``[(version, filename, sql_text), ...]`` sorted by version.

    Non-matching files in the package are ignored.

    Raises:
        RuntimeError: if two migration files declare the same version
            number. Silently accepting both would break the forward-only
            contract: both scripts would run, and ``run_migrations``
            would then fail with a confusing version-bump error.
    """
    pkg_files = resources.files("cerbero_bite.state.migrations")
    out: list[tuple[int, str, str]] = []
    for entry in pkg_files.iterdir():
        match = _MIGRATION_PATTERN.match(entry.name)
        if not match:
            continue
        version = int(match.group(1))
        out.append((version, entry.name, entry.read_text(encoding="utf-8")))
    out.sort(key=lambda triple: triple[0])
    # Duplicate versions are adjacent after sorting; reject them loudly.
    for (v_a, name_a, _), (v_b, name_b, _) in zip(out, out[1:]):
        if v_a == v_b:
            raise RuntimeError(
                f"Duplicate migration version {v_a:04d}: {name_a!r} and {name_b!r}"
            )
    return out
def run_migrations(conn: sqlite3.Connection) -> int:
    """Apply migrations whose version is greater than the current one.

    Idempotent: already-applied versions are skipped. Each script is
    expected to bump ``PRAGMA user_version`` itself; a script that does
    not is treated as a hard error.

    Returns the new ``user_version`` after the run.
    """
    version_now = current_version(conn)
    for target, filename, sql_text in list_migrations():
        if target <= version_now:
            continue  # already applied on a previous run
        conn.executescript(sql_text)
        observed = current_version(conn)
        if observed != target:
            raise RuntimeError(
                f"Migration {filename} did not bump user_version to {target} "
                f"(observed {observed})"
            )
        version_now = observed
    return version_now
@@ -0,0 +1,101 @@
-- 0001_init.sql — initial schema for Cerbero Bite (docs/05-data-model.md)
--
-- Forward-only. Run by run_migrations() in state/db.py once user_version == 0.
-- Bumps user_version to 1 at the end.
--
-- Conventions (see state/models.py and state/repository.py):
--   * NUMERIC columns carry Decimal values serialised to TEXT by the
--     repository layer, so precision is never lost via float coercion.
--   * All timestamps are UTC ISO-8601 strings stored as TEXT.

PRAGMA foreign_keys = ON;

-- One row per spread position, keyed by the proposal that created it.
CREATE TABLE positions (
    proposal_id TEXT PRIMARY KEY,
    spread_type TEXT NOT NULL,
    asset TEXT NOT NULL DEFAULT 'ETH',
    expiry TEXT NOT NULL,
    short_strike NUMERIC NOT NULL,
    long_strike NUMERIC NOT NULL,
    short_instrument TEXT NOT NULL,
    long_instrument TEXT NOT NULL,
    n_contracts INTEGER NOT NULL,
    spread_width_usd NUMERIC NOT NULL,
    spread_width_pct NUMERIC NOT NULL,
    credit_eth NUMERIC NOT NULL,
    credit_usd NUMERIC NOT NULL,
    max_loss_usd NUMERIC NOT NULL,
    spot_at_entry NUMERIC NOT NULL,
    dvol_at_entry NUMERIC NOT NULL,
    delta_at_entry NUMERIC NOT NULL,
    eth_price_at_entry NUMERIC NOT NULL,
    proposed_at TEXT NOT NULL,
    -- Lifecycle columns: NULL until the corresponding transition happens.
    opened_at TEXT,
    closed_at TEXT,
    close_reason TEXT,
    debit_paid_eth NUMERIC,
    pnl_eth NUMERIC,
    pnl_usd NUMERIC,
    status TEXT NOT NULL,
    created_at TEXT NOT NULL,
    updated_at TEXT NOT NULL
);
CREATE INDEX idx_positions_status ON positions(status);
CREATE INDEX idx_positions_closed_at ON positions(closed_at);

-- Execution instructions derived from a position (open/close combos).
CREATE TABLE instructions (
    instruction_id TEXT PRIMARY KEY,
    proposal_id TEXT NOT NULL REFERENCES positions(proposal_id),
    kind TEXT NOT NULL,
    payload_json TEXT NOT NULL,
    sent_at TEXT NOT NULL,
    -- NULL until the corresponding lifecycle event is observed.
    acknowledged_at TEXT,
    filled_at TEXT,
    cancelled_at TEXT,
    actual_fill_eth NUMERIC,
    actual_fees_eth NUMERIC
);
CREATE INDEX idx_instructions_proposal ON instructions(proposal_id);

-- Record of engine decisions with their serialised inputs/outputs.
CREATE TABLE decisions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    decision_type TEXT NOT NULL,
    proposal_id TEXT,
    timestamp TEXT NOT NULL,
    inputs_json TEXT NOT NULL,
    outputs_json TEXT NOT NULL,
    action_taken TEXT,
    notes TEXT
);
CREATE INDEX idx_decisions_timestamp ON decisions(timestamp);
CREATE INDEX idx_decisions_proposal ON decisions(proposal_id);

-- DVOL / ETH spot samples; the timestamp primary key makes writes
-- idempotent (the repository uses INSERT OR REPLACE).
CREATE TABLE dvol_history (
    timestamp TEXT PRIMARY KEY,
    dvol NUMERIC NOT NULL,
    eth_spot NUMERIC NOT NULL
);

-- Operator-enqueued actions; consumed_at IS NULL means still pending.
CREATE TABLE manual_actions (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    kind TEXT NOT NULL,
    proposal_id TEXT,
    payload_json TEXT,
    created_at TEXT NOT NULL,
    consumed_at TEXT,
    consumed_by TEXT,
    result TEXT
);
-- Readers filter on "consumed_at IS NULL"; this index serves that scan.
CREATE INDEX idx_manual_actions_unconsumed ON manual_actions(consumed_at);

-- Singleton engine state; CHECK (id = 1) enforces exactly one row.
CREATE TABLE system_state (
    id INTEGER PRIMARY KEY CHECK (id = 1),
    kill_switch INTEGER NOT NULL DEFAULT 0,
    kill_reason TEXT,
    kill_at TEXT,
    last_health_check TEXT NOT NULL,
    last_kelly_calib TEXT,
    config_version TEXT NOT NULL,
    started_at TEXT NOT NULL
);

PRAGMA user_version = 1;
@@ -0,0 +1 @@
"""Forward-only SQL migrations for the Cerbero Bite state database."""
+154
View File
@@ -0,0 +1,154 @@
"""Pydantic record types mirroring the SQLite tables.
Every numeric column documented as ``NUMERIC`` in
``state/migrations/0001_init.sql`` is exposed as :class:`decimal.Decimal`
on the Python side. The repository layer is responsible for serialising
to ``TEXT`` (using ``str``) when writing and parsing back when reading,
so precision is never lost via ``float`` coercion.
"""
from __future__ import annotations
from datetime import datetime
from decimal import Decimal
from typing import Literal
from uuid import UUID
from pydantic import BaseModel, ConfigDict, Field
# Public surface of this module; keep in sync with the class definitions.
__all__ = [
    "DecisionRecord",
    "DvolSnapshot",
    "InstructionRecord",
    "ManualAction",
    "PositionRecord",
    "PositionStatus",
    "SystemStateRecord",
]

# Closed set of lifecycle states stored verbatim in ``positions.status``.
# 'awaiting_fill', 'open' and 'closing' are the "live" states — see
# Repository.list_open_positions / count_concurrent_positions.
PositionStatus = Literal[
    "proposed",
    "awaiting_fill",
    "open",
    "closing",
    "closed",
    "cancelled",
]
class PositionRecord(BaseModel):
    """Row of the ``positions`` table.

    All Decimal fields map to NUMERIC columns and are persisted as TEXT
    (see the module docstring).
    """

    model_config = ConfigDict(extra="forbid")

    # Identity and contract description.
    proposal_id: UUID
    spread_type: str
    asset: str = "ETH"
    expiry: datetime
    short_strike: Decimal
    long_strike: Decimal
    short_instrument: str
    long_instrument: str
    n_contracts: int
    # Sizing and entry-time market snapshot.
    spread_width_usd: Decimal
    spread_width_pct: Decimal
    credit_eth: Decimal
    credit_usd: Decimal
    max_loss_usd: Decimal
    spot_at_entry: Decimal
    dvol_at_entry: Decimal
    delta_at_entry: Decimal
    eth_price_at_entry: Decimal
    # Lifecycle: optional fields stay None until the corresponding
    # transition is recorded (see Repository.update_position_status).
    proposed_at: datetime
    opened_at: datetime | None = None
    closed_at: datetime | None = None
    close_reason: str | None = None
    debit_paid_eth: Decimal | None = None
    pnl_eth: Decimal | None = None
    pnl_usd: Decimal | None = None
    status: PositionStatus
    created_at: datetime
    updated_at: datetime
class InstructionRecord(BaseModel):
    """Row of the ``instructions`` table."""

    model_config = ConfigDict(extra="forbid")

    instruction_id: UUID
    # Foreign key to positions.proposal_id.
    proposal_id: UUID
    kind: Literal["open_combo", "close_combo"]
    # Opaque JSON payload; (de)serialisation is the caller's concern.
    payload_json: str
    sent_at: datetime
    # None until the corresponding lifecycle event is observed.
    acknowledged_at: datetime | None = None
    filled_at: datetime | None = None
    cancelled_at: datetime | None = None
    actual_fill_eth: Decimal | None = None
    actual_fees_eth: Decimal | None = None
class DecisionRecord(BaseModel):
    """Row of the ``decisions`` table.

    ``id`` is :class:`int` and may be ``None`` before the row has been
    inserted; the repository sets it after the auto-increment fires.
    """

    model_config = ConfigDict(extra="forbid")

    id: int | None = None
    # Closed set of decision kinds; stored verbatim as TEXT.
    decision_type: Literal["entry_check", "exit_check", "kelly_recalib"]
    proposal_id: UUID | None = None
    timestamp: datetime
    # Opaque JSON blobs recording what the decision saw and produced.
    inputs_json: str
    outputs_json: str
    action_taken: str | None = None
    notes: str | None = None
class DvolSnapshot(BaseModel):
    """Row of the ``dvol_history`` table."""

    model_config = ConfigDict(extra="forbid")

    # Primary key of the table: one snapshot per instant.
    timestamp: datetime
    dvol: Decimal
    eth_spot: Decimal
class ManualAction(BaseModel):
    """Row of the ``manual_actions`` table (operator-enqueued actions)."""

    model_config = ConfigDict(extra="forbid")

    # None until inserted; set from the AUTOINCREMENT id afterwards.
    id: int | None = None
    kind: Literal[
        "approve_proposal",
        "reject_proposal",
        "force_close",
        "arm_kill",
        "disarm_kill",
    ]
    proposal_id: UUID | None = None
    payload_json: str | None = None
    created_at: datetime
    # Consumption bookkeeping: all three stay None while the action is
    # pending (see Repository.next_unconsumed_action / mark_action_consumed).
    consumed_at: datetime | None = None
    consumed_by: str | None = None
    result: str | None = None
class SystemStateRecord(BaseModel):
    """Singleton row of the ``system_state`` table."""

    model_config = ConfigDict(extra="forbid")

    # The schema enforces id = 1 via CHECK; there is exactly one row.
    id: int = Field(default=1)
    # Stored as a 0/1 INTEGER, mirroring the SQLite column.
    kill_switch: int = 0
    kill_reason: str | None = None
    kill_at: datetime | None = None
    last_health_check: datetime
    last_kelly_calib: datetime | None = None
    config_version: str
    started_at: datetime
+553
View File
@@ -0,0 +1,553 @@
"""Typed CRUD layer over the SQLite database.
All methods take a :class:`sqlite3.Connection` so callers can compose a
single transaction across multiple writes (the orchestrator does this
when persisting an entry decision + the resulting proposal). The
repository never opens its own connection: that responsibility is left
to :func:`cerbero_bite.state.db.connect`.
Decimals are stored as TEXT to preserve precision (see
``state/models.py``).
"""
from __future__ import annotations
import sqlite3
from datetime import UTC, datetime
from decimal import Decimal
from typing import Any
from uuid import UUID
from cerbero_bite.state.models import (
DecisionRecord,
DvolSnapshot,
InstructionRecord,
ManualAction,
PositionRecord,
PositionStatus,
SystemStateRecord,
)
__all__ = ["Repository"]
# ---------------------------------------------------------------------------
# Encoding helpers
# ---------------------------------------------------------------------------
def _enc_dec(value: Decimal | None) -> str | None:
return None if value is None else str(value)
def _enc_dt(value: datetime | None) -> str | None:
if value is None:
return None
if value.tzinfo is None:
value = value.replace(tzinfo=UTC)
return value.astimezone(UTC).isoformat()
def _enc_uuid(value: UUID | None) -> str | None:
return None if value is None else str(value)
def _dec_dec(value: Any) -> Decimal | None:
if value is None:
return None
return Decimal(str(value))
def _dec_dt(value: Any) -> datetime | None:
if value is None:
return None
return datetime.fromisoformat(str(value))
def _dec_uuid(value: Any) -> UUID | None:
if value is None:
return None
return UUID(str(value))
# ---------------------------------------------------------------------------
# Repository
# ---------------------------------------------------------------------------
class Repository:
    """Typed CRUD wrapper. One instance per process, all calls take a conn.

    The repository never opens connections or transactions itself: every
    method takes the caller's :class:`sqlite3.Connection` so several
    writes can share one transaction (see ``state/db.py``). Decimals are
    stored as TEXT and timestamps as UTC ISO-8601 TEXT via the module's
    ``_enc_*`` / ``_dec_*`` helpers.
    """

    # ------------------------------------------------------------------
    # positions
    # ------------------------------------------------------------------
    def create_position(self, conn: sqlite3.Connection, record: PositionRecord) -> None:
        """Insert ``record`` as a new row of ``positions``."""
        conn.execute(
            """
            INSERT INTO positions (
                proposal_id, spread_type, asset, expiry,
                short_strike, long_strike, short_instrument, long_instrument,
                n_contracts, spread_width_usd, spread_width_pct,
                credit_eth, credit_usd, max_loss_usd,
                spot_at_entry, dvol_at_entry, delta_at_entry, eth_price_at_entry,
                proposed_at, opened_at, closed_at, close_reason,
                debit_paid_eth, pnl_eth, pnl_usd,
                status, created_at, updated_at
            ) VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
            """,
            (
                _enc_uuid(record.proposal_id),
                record.spread_type,
                record.asset,
                _enc_dt(record.expiry),
                _enc_dec(record.short_strike),
                _enc_dec(record.long_strike),
                record.short_instrument,
                record.long_instrument,
                record.n_contracts,
                _enc_dec(record.spread_width_usd),
                _enc_dec(record.spread_width_pct),
                _enc_dec(record.credit_eth),
                _enc_dec(record.credit_usd),
                _enc_dec(record.max_loss_usd),
                _enc_dec(record.spot_at_entry),
                _enc_dec(record.dvol_at_entry),
                _enc_dec(record.delta_at_entry),
                _enc_dec(record.eth_price_at_entry),
                _enc_dt(record.proposed_at),
                _enc_dt(record.opened_at),
                _enc_dt(record.closed_at),
                record.close_reason,
                _enc_dec(record.debit_paid_eth),
                _enc_dec(record.pnl_eth),
                _enc_dec(record.pnl_usd),
                record.status,
                _enc_dt(record.created_at),
                _enc_dt(record.updated_at),
            ),
        )

    def get_position(
        self, conn: sqlite3.Connection, proposal_id: UUID
    ) -> PositionRecord | None:
        """Fetch one position by primary key, or ``None`` if absent."""
        row = conn.execute(
            "SELECT * FROM positions WHERE proposal_id = ?",
            (_enc_uuid(proposal_id),),
        ).fetchone()
        return None if row is None else _row_to_position(row)

    def list_positions(
        self,
        conn: sqlite3.Connection,
        *,
        status: PositionStatus | None = None,
    ) -> list[PositionRecord]:
        """List positions, newest first, optionally filtered by status."""
        if status is None:
            cursor = conn.execute(
                "SELECT * FROM positions ORDER BY created_at DESC"
            )
        else:
            cursor = conn.execute(
                "SELECT * FROM positions WHERE status = ? ORDER BY created_at DESC",
                (status,),
            )
        return [_row_to_position(r) for r in cursor.fetchall()]

    def list_open_positions(self, conn: sqlite3.Connection) -> list[PositionRecord]:
        """List the "live" positions (open / awaiting_fill / closing)."""
        rows = conn.execute(
            "SELECT * FROM positions WHERE status IN ('open', 'awaiting_fill', "
            "'closing') ORDER BY created_at DESC"
        ).fetchall()
        return [_row_to_position(r) for r in rows]

    def update_position_status(
        self,
        conn: sqlite3.Connection,
        proposal_id: UUID,
        *,
        status: PositionStatus,
        opened_at: datetime | None = None,
        closed_at: datetime | None = None,
        close_reason: str | None = None,
        debit_paid_eth: Decimal | None = None,
        pnl_eth: Decimal | None = None,
        pnl_usd: Decimal | None = None,
        now: datetime,
    ) -> None:
        """Transition a position's status.

        Only the keyword arguments that are not ``None`` are written, so
        columns can never be nulled out here; ``updated_at`` is always
        refreshed to ``now``.
        """
        # Build the SET clause dynamically from the provided fields.
        sets: list[str] = ["status = ?", "updated_at = ?"]
        params: list[Any] = [status, _enc_dt(now)]
        if opened_at is not None:
            sets.append("opened_at = ?")
            params.append(_enc_dt(opened_at))
        if closed_at is not None:
            sets.append("closed_at = ?")
            params.append(_enc_dt(closed_at))
        if close_reason is not None:
            sets.append("close_reason = ?")
            params.append(close_reason)
        if debit_paid_eth is not None:
            sets.append("debit_paid_eth = ?")
            params.append(_enc_dec(debit_paid_eth))
        if pnl_eth is not None:
            sets.append("pnl_eth = ?")
            params.append(_enc_dec(pnl_eth))
        if pnl_usd is not None:
            sets.append("pnl_usd = ?")
            params.append(_enc_dec(pnl_usd))
        params.append(_enc_uuid(proposal_id))
        # Column names come from the hard-coded lists above, never from
        # user input, so the f-string here is injection-safe.
        conn.execute(
            f"UPDATE positions SET {', '.join(sets)} WHERE proposal_id = ?",
            params,
        )

    def count_concurrent_positions(self, conn: sqlite3.Connection) -> int:
        """Count the "live" positions (awaiting_fill / open / closing)."""
        row = conn.execute(
            "SELECT COUNT(*) FROM positions WHERE status IN "
            "('awaiting_fill', 'open', 'closing')"
        ).fetchone()
        return int(row[0])

    # ------------------------------------------------------------------
    # instructions
    # ------------------------------------------------------------------
    def create_instruction(
        self, conn: sqlite3.Connection, record: InstructionRecord
    ) -> None:
        """Insert ``record`` as a new row of ``instructions``."""
        conn.execute(
            """
            INSERT INTO instructions (
                instruction_id, proposal_id, kind, payload_json,
                sent_at, acknowledged_at, filled_at, cancelled_at,
                actual_fill_eth, actual_fees_eth
            ) VALUES (?,?,?,?,?,?,?,?,?,?)
            """,
            (
                _enc_uuid(record.instruction_id),
                _enc_uuid(record.proposal_id),
                record.kind,
                record.payload_json,
                _enc_dt(record.sent_at),
                _enc_dt(record.acknowledged_at),
                _enc_dt(record.filled_at),
                _enc_dt(record.cancelled_at),
                _enc_dec(record.actual_fill_eth),
                _enc_dec(record.actual_fees_eth),
            ),
        )

    def update_instruction(
        self,
        conn: sqlite3.Connection,
        instruction_id: UUID,
        *,
        acknowledged_at: datetime | None = None,
        filled_at: datetime | None = None,
        cancelled_at: datetime | None = None,
        actual_fill_eth: Decimal | None = None,
        actual_fees_eth: Decimal | None = None,
    ) -> None:
        """Record instruction lifecycle events.

        Only non-``None`` keyword arguments are written; calling with no
        arguments is a no-op.
        """
        sets: list[str] = []
        params: list[Any] = []
        if acknowledged_at is not None:
            sets.append("acknowledged_at = ?")
            params.append(_enc_dt(acknowledged_at))
        if filled_at is not None:
            sets.append("filled_at = ?")
            params.append(_enc_dt(filled_at))
        if cancelled_at is not None:
            sets.append("cancelled_at = ?")
            params.append(_enc_dt(cancelled_at))
        if actual_fill_eth is not None:
            sets.append("actual_fill_eth = ?")
            params.append(_enc_dec(actual_fill_eth))
        if actual_fees_eth is not None:
            sets.append("actual_fees_eth = ?")
            params.append(_enc_dec(actual_fees_eth))
        if not sets:
            return
        params.append(_enc_uuid(instruction_id))
        conn.execute(
            f"UPDATE instructions SET {', '.join(sets)} "
            f"WHERE instruction_id = ?",
            params,
        )

    def list_instructions(
        self, conn: sqlite3.Connection, proposal_id: UUID
    ) -> list[InstructionRecord]:
        """List a position's instructions in the order they were sent."""
        rows = conn.execute(
            "SELECT * FROM instructions WHERE proposal_id = ? ORDER BY sent_at",
            (_enc_uuid(proposal_id),),
        ).fetchall()
        return [_row_to_instruction(r) for r in rows]

    # ------------------------------------------------------------------
    # decisions
    # ------------------------------------------------------------------
    def record_decision(
        self, conn: sqlite3.Connection, record: DecisionRecord
    ) -> int:
        """Insert a decision row and return its auto-increment id."""
        cursor = conn.execute(
            """
            INSERT INTO decisions (
                decision_type, proposal_id, timestamp,
                inputs_json, outputs_json, action_taken, notes
            ) VALUES (?,?,?,?,?,?,?)
            """,
            (
                record.decision_type,
                _enc_uuid(record.proposal_id),
                _enc_dt(record.timestamp),
                record.inputs_json,
                record.outputs_json,
                record.action_taken,
                record.notes,
            ),
        )
        # lastrowid is Optional in typeshed; 0 keeps the return an int.
        return int(cursor.lastrowid or 0)

    def list_decisions(
        self,
        conn: sqlite3.Connection,
        *,
        proposal_id: UUID | None = None,
        limit: int = 100,
    ) -> list[DecisionRecord]:
        """List decisions, newest first, optionally scoped to a proposal."""
        if proposal_id is None:
            rows = conn.execute(
                "SELECT * FROM decisions ORDER BY timestamp DESC LIMIT ?",
                (limit,),
            ).fetchall()
        else:
            rows = conn.execute(
                "SELECT * FROM decisions WHERE proposal_id = ? "
                "ORDER BY timestamp DESC LIMIT ?",
                (_enc_uuid(proposal_id), limit),
            ).fetchall()
        return [_row_to_decision(r) for r in rows]

    # ------------------------------------------------------------------
    # dvol_history
    # ------------------------------------------------------------------
    def record_dvol_snapshot(
        self, conn: sqlite3.Connection, snapshot: DvolSnapshot
    ) -> None:
        """Upsert one DVOL sample (timestamp is the primary key)."""
        conn.execute(
            "INSERT OR REPLACE INTO dvol_history(timestamp, dvol, eth_spot) "
            "VALUES (?,?,?)",
            (
                _enc_dt(snapshot.timestamp),
                _enc_dec(snapshot.dvol),
                _enc_dec(snapshot.eth_spot),
            ),
        )

    # ------------------------------------------------------------------
    # manual_actions
    # ------------------------------------------------------------------
    def enqueue_manual_action(
        self, conn: sqlite3.Connection, action: ManualAction
    ) -> int:
        """Insert a pending manual action; returns its auto-increment id."""
        cursor = conn.execute(
            "INSERT INTO manual_actions(kind, proposal_id, payload_json, created_at) "
            "VALUES (?,?,?,?)",
            (
                action.kind,
                _enc_uuid(action.proposal_id),
                action.payload_json,
                _enc_dt(action.created_at),
            ),
        )
        return int(cursor.lastrowid or 0)

    def next_unconsumed_action(
        self, conn: sqlite3.Connection
    ) -> ManualAction | None:
        """Return the oldest pending action (FIFO by created_at), if any."""
        row = conn.execute(
            "SELECT * FROM manual_actions WHERE consumed_at IS NULL "
            "ORDER BY created_at ASC LIMIT 1"
        ).fetchone()
        return None if row is None else _row_to_manual(row)

    def mark_action_consumed(
        self,
        conn: sqlite3.Connection,
        action_id: int,
        *,
        consumed_by: str,
        result: str,
    now: datetime,
    ) -> None:
        """Stamp an action as consumed, recording who handled it and how."""
        conn.execute(
            "UPDATE manual_actions SET consumed_at = ?, consumed_by = ?, "
            "result = ? WHERE id = ?",
            (_enc_dt(now), consumed_by, result, action_id),
        )

    # ------------------------------------------------------------------
    # system_state
    # ------------------------------------------------------------------
    def init_system_state(
        self, conn: sqlite3.Connection, *, config_version: str, now: datetime
    ) -> None:
        """Insert the singleton row if it does not already exist."""
        existing = conn.execute(
            "SELECT 1 FROM system_state WHERE id = 1"
        ).fetchone()
        if existing is not None:
            return
        conn.execute(
            "INSERT INTO system_state(id, kill_switch, last_health_check, "
            "config_version, started_at) VALUES (1, 0, ?, ?, ?)",
            (_enc_dt(now), config_version, _enc_dt(now)),
        )

    def get_system_state(
        self, conn: sqlite3.Connection
    ) -> SystemStateRecord | None:
        """Read the singleton row; ``None`` if it was never initialised."""
        row = conn.execute("SELECT * FROM system_state WHERE id = 1").fetchone()
        if row is None:
            return None
        return SystemStateRecord(
            id=int(row["id"]),
            kill_switch=int(row["kill_switch"]),
            kill_reason=row["kill_reason"],
            kill_at=_dec_dt(row["kill_at"]),
            last_health_check=_dec_dt_required(row["last_health_check"]),
            last_kelly_calib=_dec_dt(row["last_kelly_calib"]),
            config_version=row["config_version"],
            started_at=_dec_dt_required(row["started_at"]),
        )

    def set_kill_switch(
        self,
        conn: sqlite3.Connection,
        *,
        armed: bool,
        reason: str | None,
        now: datetime,
    ) -> None:
        """Arm or disarm the kill switch and refresh the health check.

        ``kill_at`` is set to ``now`` when arming and cleared on disarm;
        ``kill_reason`` is stored as passed in either case.
        """
        conn.execute(
            "UPDATE system_state SET kill_switch = ?, kill_reason = ?, "
            "kill_at = ?, last_health_check = ? WHERE id = 1",
            (
                1 if armed else 0,
                reason,
                _enc_dt(now) if armed else None,
                _enc_dt(now),
            ),
        )

    def touch_health_check(
        self, conn: sqlite3.Connection, *, now: datetime
    ) -> None:
        """Update ``last_health_check`` to ``now`` (watchdog heartbeat)."""
        conn.execute(
            "UPDATE system_state SET last_health_check = ? WHERE id = 1",
            (_enc_dt(now),),
        )
# ---------------------------------------------------------------------------
# Row → model converters
# ---------------------------------------------------------------------------
def _dec_dt_required(value: Any) -> datetime:
    """Parse a NOT NULL datetime column; a NULL here means corruption."""
    if value is None:
        raise ValueError("expected non-null datetime in row")
    return datetime.fromisoformat(str(value))
def _row_to_position(row: sqlite3.Row) -> PositionRecord:
    """Materialise a ``positions`` row as a validated PositionRecord."""
    proposal_id = _dec_uuid(row["proposal_id"])
    if proposal_id is None:
        raise ValueError("positions.proposal_id was NULL")
    # Build the kwargs sequentially so decode errors surface in column order.
    fields: dict[str, Any] = {"proposal_id": proposal_id}
    fields["spread_type"] = row["spread_type"]
    fields["asset"] = row["asset"]
    fields["expiry"] = _dec_dt_required(row["expiry"])
    fields["short_strike"] = _dec_dec_required(row["short_strike"])
    fields["long_strike"] = _dec_dec_required(row["long_strike"])
    fields["short_instrument"] = row["short_instrument"]
    fields["long_instrument"] = row["long_instrument"]
    fields["n_contracts"] = int(row["n_contracts"])
    fields["spread_width_usd"] = _dec_dec_required(row["spread_width_usd"])
    fields["spread_width_pct"] = _dec_dec_required(row["spread_width_pct"])
    fields["credit_eth"] = _dec_dec_required(row["credit_eth"])
    fields["credit_usd"] = _dec_dec_required(row["credit_usd"])
    fields["max_loss_usd"] = _dec_dec_required(row["max_loss_usd"])
    fields["spot_at_entry"] = _dec_dec_required(row["spot_at_entry"])
    fields["dvol_at_entry"] = _dec_dec_required(row["dvol_at_entry"])
    fields["delta_at_entry"] = _dec_dec_required(row["delta_at_entry"])
    fields["eth_price_at_entry"] = _dec_dec_required(row["eth_price_at_entry"])
    fields["proposed_at"] = _dec_dt_required(row["proposed_at"])
    fields["opened_at"] = _dec_dt(row["opened_at"])
    fields["closed_at"] = _dec_dt(row["closed_at"])
    fields["close_reason"] = row["close_reason"]
    fields["debit_paid_eth"] = _dec_dec(row["debit_paid_eth"])
    fields["pnl_eth"] = _dec_dec(row["pnl_eth"])
    fields["pnl_usd"] = _dec_dec(row["pnl_usd"])
    fields["status"] = row["status"]
    fields["created_at"] = _dec_dt_required(row["created_at"])
    fields["updated_at"] = _dec_dt_required(row["updated_at"])
    return PositionRecord(**fields)
def _row_to_instruction(row: sqlite3.Row) -> InstructionRecord:
    """Materialise an ``instructions`` row as a validated InstructionRecord."""
    instruction_id = _dec_uuid(row["instruction_id"])
    proposal_id = _dec_uuid(row["proposal_id"])
    if instruction_id is None or proposal_id is None:
        raise ValueError("instructions row missing required UUID")
    fields: dict[str, Any] = {
        "instruction_id": instruction_id,
        "proposal_id": proposal_id,
        "kind": row["kind"],
        "payload_json": row["payload_json"],
        "sent_at": _dec_dt_required(row["sent_at"]),
        "acknowledged_at": _dec_dt(row["acknowledged_at"]),
        "filled_at": _dec_dt(row["filled_at"]),
        "cancelled_at": _dec_dt(row["cancelled_at"]),
        "actual_fill_eth": _dec_dec(row["actual_fill_eth"]),
        "actual_fees_eth": _dec_dec(row["actual_fees_eth"]),
    }
    return InstructionRecord(**fields)
def _row_to_decision(row: sqlite3.Row) -> DecisionRecord:
    """Materialise a ``decisions`` row as a validated DecisionRecord."""
    fields: dict[str, Any] = {
        "id": int(row["id"]),
        "decision_type": row["decision_type"],
        "proposal_id": _dec_uuid(row["proposal_id"]),
        "timestamp": _dec_dt_required(row["timestamp"]),
        "inputs_json": row["inputs_json"],
        "outputs_json": row["outputs_json"],
        "action_taken": row["action_taken"],
        "notes": row["notes"],
    }
    return DecisionRecord(**fields)
def _row_to_manual(row: sqlite3.Row) -> ManualAction:
    """Materialise a ``manual_actions`` row as a validated ManualAction."""
    fields: dict[str, Any] = {
        "id": int(row["id"]),
        "kind": row["kind"],
        "proposal_id": _dec_uuid(row["proposal_id"]),
        "payload_json": row["payload_json"],
        "created_at": _dec_dt_required(row["created_at"]),
        "consumed_at": _dec_dt(row["consumed_at"]),
        "consumed_by": row["consumed_by"],
        "result": row["result"],
    }
    return ManualAction(**fields)
def _dec_dec_required(value: Any) -> Decimal:
    """Parse a NOT NULL Decimal column; a NULL here means corruption."""
    if value is None:
        raise ValueError("expected non-null Decimal in row")
    return Decimal(str(value))