refactor(llm): route all tiers via OpenRouter, drop Anthropic SDK

Tutti i tier (S/A/B/C/D) ora passano per OpenRouter via OpenAI SDK.
Modelli Anthropic raggiungibili via prefisso `anthropic/...`.

- pyproject: rimosso `anthropic>=0.39` da deps + uv.lock
- config: rimosso `anthropic_api_key` field
- LLMClient: dispatch unico, single client OpenAI con base_url OpenRouter
- defaults S/A/B → `anthropic/claude-{opus-4-7,sonnet-4-6}`
- retry exceptions: solo openai.* (drop anthropic.*)
- test rinominati e adattati: tier S/A/B mockano OpenAI con prefisso `anthropic/`
- rimosso test `tier_S_without_anthropic_key_raises` (non più rilevante)

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
2026-05-10 09:39:23 +02:00
parent 70b8bc3d6c
commit 4dad8be36b
8 changed files with 84 additions and 177 deletions
+8 -59
View File
@@ -2,9 +2,7 @@ from __future__ import annotations
from dataclasses import dataclass
import anthropic
import openai
from anthropic import Anthropic
from openai import OpenAI
from tenacity import (
retry,
@@ -15,12 +13,12 @@ from tenacity import (
from ..genome.hypothesis import HypothesisAgentGenome, ModelTier
# Modelli configurati per Phase 1
MODEL_TIER_S = "claude-opus-4-7" # via Anthropic
MODEL_TIER_A = "claude-sonnet-4-6" # via Anthropic (premium override)
MODEL_TIER_B = "claude-sonnet-4-6" # via Anthropic
MODEL_TIER_C = "qwen/qwen-2.5-72b-instruct" # via OpenRouter
MODEL_TIER_D = "meta-llama/llama-3.3-70b-instruct" # via OpenRouter
# Modelli configurati per Phase 1 — tutti via OpenRouter
MODEL_TIER_S = "anthropic/claude-opus-4-7"
MODEL_TIER_A = "anthropic/claude-sonnet-4-6"
MODEL_TIER_B = "anthropic/claude-sonnet-4-6"
MODEL_TIER_C = "qwen/qwen-2.5-72b-instruct"
MODEL_TIER_D = "meta-llama/llama-3.3-70b-instruct"
OPENROUTER_BASE_URL = "https://openrouter.ai/api/v1"
# Errori transient: retry. RateLimit/Auth/InvalidRequest: NO retry.
@@ -28,9 +26,6 @@ _RETRYABLE_EXCEPTIONS: tuple[type[BaseException], ...] = (
openai.APIConnectionError,
openai.APITimeoutError,
openai.InternalServerError,
anthropic.APIConnectionError,
anthropic.APITimeoutError,
anthropic.InternalServerError,
)
@@ -44,13 +39,9 @@ class CompletionResult:
class LLMClient:
_ANTHROPIC_TIERS: tuple[ModelTier, ...] = (ModelTier.S, ModelTier.A, ModelTier.B)
_OPENROUTER_TIERS: tuple[ModelTier, ...] = (ModelTier.C, ModelTier.D)
def __init__(
self,
openrouter_api_key: str,
anthropic_api_key: str | None = None,
model_tier_s: str = MODEL_TIER_S,
model_tier_a: str = MODEL_TIER_A,
model_tier_b: str = MODEL_TIER_B,
@@ -71,8 +62,7 @@ class LLMClient:
ModelTier.C: model_tier_c,
ModelTier.D: model_tier_d,
}
self._openrouter = OpenAI(api_key=openrouter_api_key, base_url=openrouter_base_url)
self._anthropic = Anthropic(api_key=anthropic_api_key) if anthropic_api_key else None
self._client = OpenAI(api_key=openrouter_api_key, base_url=openrouter_base_url)
@retry(
stop=stop_after_attempt(3),
@@ -88,19 +78,7 @@ class LLMClient:
max_tokens: int = 2000,
) -> CompletionResult:
model = self._tier_models[genome.model_tier]
if genome.model_tier in self._ANTHROPIC_TIERS:
return self._call_anthropic(genome, system, user, max_tokens, model)
return self._call_openrouter(genome, system, user, max_tokens, model)
def _call_openrouter(
self,
genome: HypothesisAgentGenome,
system: str,
user: str,
max_tokens: int,
model: str,
) -> CompletionResult:
resp = self._openrouter.chat.completions.create(
resp = self._client.chat.completions.create(
model=model,
messages=[
{"role": "system", "content": system},
@@ -119,32 +97,3 @@ class LLMClient:
tier=genome.model_tier,
model=model,
)
def _call_anthropic(
    self,
    genome: HypothesisAgentGenome,
    system: str,
    user: str,
    max_tokens: int,
    model: str,
) -> CompletionResult:
    """Execute a completion through the Anthropic Messages API.

    Requires the optional Anthropic client to have been configured at
    construction time; raises RuntimeError otherwise. Sampling parameters
    (temperature, top_p) come from the genome.
    """
    client = self._anthropic
    if client is None:
        # Tier S/A/B genomes are routed to Anthropic directly, which is
        # impossible without a configured key.
        raise RuntimeError(
            f"ANTHROPIC_API_KEY required for tier {genome.model_tier.value} genomes"
        )
    response = client.messages.create(
        model=model,
        system=system,
        messages=[{"role": "user", "content": user}],
        temperature=genome.temperature,
        top_p=genome.top_p,
        max_tokens=max_tokens,
    )
    # Content blocks may include non-text items; keep only text segments.
    text_parts = [block.text for block in response.content if hasattr(block, "text")]
    return CompletionResult(
        text="".join(text_parts),
        input_tokens=response.usage.input_tokens,
        output_tokens=response.usage.output_tokens,
        tier=genome.model_tier,
        model=model,
    )