feat(llm): make tier-C/tier-B model + OpenRouter URL configurable from .env
LLM_MODEL_TIER_C, LLM_MODEL_TIER_B and OPENROUTER_BASE_URL can now be overridden via env. Defaults are unchanged (backward-compatible). LLMClient accepts the three values as optional kwargs; run_phase1 propagates them from Settings. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
@@ -45,8 +45,14 @@ class LLMClient:
         self,
         openrouter_api_key: str,
         anthropic_api_key: str | None = None,
+        model_tier_c: str = MODEL_TIER_C,
+        model_tier_b: str = MODEL_TIER_B,
+        openrouter_base_url: str = OPENROUTER_BASE_URL,
     ) -> None:
-        self._openrouter = OpenAI(api_key=openrouter_api_key, base_url=OPENROUTER_BASE_URL)
+        self.model_tier_c = model_tier_c
+        self.model_tier_b = model_tier_b
+        self.openrouter_base_url = openrouter_base_url
+        self._openrouter = OpenAI(api_key=openrouter_api_key, base_url=openrouter_base_url)
         self._anthropic = Anthropic(api_key=anthropic_api_key) if anthropic_api_key else None

     @retry(
@@ -64,7 +70,7 @@ class LLMClient:
     ) -> CompletionResult:
         if genome.model_tier == ModelTier.C:
             resp = self._openrouter.chat.completions.create(
-                model=MODEL_TIER_C,
+                model=self.model_tier_c,
                 messages=[
                     {"role": "system", "content": system},
                     {"role": "user", "content": user},
@@ -80,14 +86,14 @@ class LLMClient:
                 input_tokens=usage.prompt_tokens,
                 output_tokens=usage.completion_tokens,
                 tier=ModelTier.C,
-                model=MODEL_TIER_C,
+                model=self.model_tier_c,
             )

         if self._anthropic is None:
             raise RuntimeError("ANTHROPIC_API_KEY required for tier B genomes")

         msg = self._anthropic.messages.create(
-            model=MODEL_TIER_B,
+            model=self.model_tier_b,
             system=system,
             messages=[{"role": "user", "content": user}],
             temperature=genome.temperature,
@@ -100,5 +106,5 @@ class LLMClient:
             input_tokens=msg.usage.input_tokens,
             output_tokens=msg.usage.output_tokens,
             tier=ModelTier.B,
-            model=MODEL_TIER_B,
+            model=self.model_tier_b,
         )
Reference in New Issue
Block a user