refactor: route main agent client + fallback through centralized router

Phase 2 of the provider router migration — route the main agent's
client construction and fallback activation through
resolve_provider_client() instead of duplicated ad-hoc logic.

run_agent.py:
- __init__: When no explicit api_key/base_url, use
  resolve_provider_client(provider, raw_codex=True) for client
  construction. Explicit creds (from CLI/gateway runtime provider)
  still construct directly.
- _try_activate_fallback: Replace _resolve_fallback_credentials and
  its duplicated _FALLBACK_API_KEY_PROVIDERS / _FALLBACK_OAUTH_PROVIDERS
  dicts with a single resolve_provider_client() call. The router
  handles all provider types (API-key, OAuth, Codex) centrally.
- Remove _resolve_fallback_credentials method and both fallback dicts.

agent/auxiliary_client.py:
- Add raw_codex parameter to resolve_provider_client(). When True,
  returns the raw OpenAI client for Codex providers instead of wrapping
  in CodexAuxiliaryClient. The main agent needs this for direct
  responses.stream() access.

3251 passed, 2 pre-existing unrelated failures.
This commit is contained in:
teknium1 2026-03-11 21:38:29 -07:00
parent 29ef69c703
commit a29801286f
3 changed files with 206 additions and 196 deletions

View file

@ -536,6 +536,7 @@ def resolve_provider_client(
provider: str,
model: str = None,
async_mode: bool = False,
raw_codex: bool = False,
) -> Tuple[Optional[Any], Optional[str]]:
"""Central router: given a provider name and optional model, return a
configured client with the correct auth, base URL, and API format.
@ -553,6 +554,10 @@ def resolve_provider_client(
model: Model slug override. If None, uses the provider's default
auxiliary model.
async_mode: If True, return an async-compatible client.
raw_codex: If True, return a raw OpenAI client for Codex providers
instead of wrapping in CodexAuxiliaryClient. Use this when
the caller needs direct access to responses.stream() (e.g.,
the main agent loop).
Returns:
(client, resolved_model) or (None, None) if auth is unavailable.
@ -597,6 +602,18 @@ def resolve_provider_client(
# ── OpenAI Codex (OAuth → Responses API) ─────────────────────────
if provider == "openai-codex":
if raw_codex:
# Return the raw OpenAI client for callers that need direct
# access to responses.stream() (e.g., the main agent loop).
codex_token = _read_codex_access_token()
if not codex_token:
logger.warning("resolve_provider_client: openai-codex requested "
"but no Codex OAuth token found (run: hermes model)")
return None, None
final_model = model or _CODEX_AUX_MODEL
raw_client = OpenAI(api_key=codex_token, base_url=_CODEX_AUX_BASE_URL)
return (raw_client, final_model)
# Standard path: wrap in CodexAuxiliaryClient adapter
client, default = _try_codex()
if client is None:
logger.warning("resolve_provider_client: openai-codex requested "

View file

@ -418,36 +418,50 @@ class AIAgent:
]:
logging.getLogger(quiet_logger).setLevel(logging.ERROR)
# Initialize OpenAI client - defaults to OpenRouter
client_kwargs = {}
# Default to OpenRouter if no base_url provided
if base_url:
client_kwargs["base_url"] = base_url
# Initialize OpenAI client via centralized provider router.
# The router handles auth resolution, base URL, headers, and
# Codex wrapping for all known providers.
# raw_codex=True because the main agent needs direct responses.stream()
# access for Codex Responses API streaming.
if api_key and base_url:
# Explicit credentials from CLI/gateway — construct directly.
# The runtime provider resolver already handled auth for us.
client_kwargs = {"api_key": api_key, "base_url": base_url}
effective_base = base_url
if "openrouter" in effective_base.lower():
client_kwargs["default_headers"] = {
"HTTP-Referer": "https://github.com/NousResearch/hermes-agent",
"X-OpenRouter-Title": "Hermes Agent",
"X-OpenRouter-Categories": "productivity,cli-agent",
}
elif "api.kimi.com" in effective_base.lower():
client_kwargs["default_headers"] = {
"User-Agent": "KimiCLI/1.0",
}
else:
client_kwargs["base_url"] = OPENROUTER_BASE_URL
# Handle API key - OpenRouter is the primary provider
if api_key:
client_kwargs["api_key"] = api_key
else:
# Primary: OPENROUTER_API_KEY, fallback to direct provider keys
client_kwargs["api_key"] = os.getenv("OPENROUTER_API_KEY", "")
# OpenRouter app attribution — shows hermes-agent in rankings/analytics
effective_base = client_kwargs.get("base_url", "")
if "openrouter" in effective_base.lower():
client_kwargs["default_headers"] = {
"HTTP-Referer": "https://github.com/NousResearch/hermes-agent",
"X-OpenRouter-Title": "Hermes Agent",
"X-OpenRouter-Categories": "productivity,cli-agent",
}
elif "api.kimi.com" in effective_base.lower():
# Kimi Code API requires a recognized coding-agent User-Agent
# (see https://github.com/MoonshotAI/kimi-cli)
client_kwargs["default_headers"] = {
"User-Agent": "KimiCLI/1.0",
}
# No explicit creds — use the centralized provider router
from agent.auxiliary_client import resolve_provider_client
_routed_client, _ = resolve_provider_client(
self.provider or "auto", model=self.model, raw_codex=True)
if _routed_client is not None:
client_kwargs = {
"api_key": _routed_client.api_key,
"base_url": str(_routed_client.base_url),
}
# Preserve any default_headers the router set
if hasattr(_routed_client, '_default_headers') and _routed_client._default_headers:
client_kwargs["default_headers"] = dict(_routed_client._default_headers)
else:
# Final fallback: try raw OpenRouter key
client_kwargs = {
"api_key": os.getenv("OPENROUTER_API_KEY", ""),
"base_url": OPENROUTER_BASE_URL,
"default_headers": {
"HTTP-Referer": "https://github.com/NousResearch/hermes-agent",
"X-OpenRouter-Title": "Hermes Agent",
"X-OpenRouter-Categories": "productivity,cli-agent",
},
}
self._client_kwargs = client_kwargs # stored for rebuilding after interrupt
try:
@ -2236,75 +2250,6 @@ class AIAgent:
# ── Provider fallback ──────────────────────────────────────────────────
# API-key providers: provider → (base_url, [env_var_names])
_FALLBACK_API_KEY_PROVIDERS = {
"openrouter": (OPENROUTER_BASE_URL, ["OPENROUTER_API_KEY"]),
"zai": ("https://api.z.ai/api/paas/v4", ["ZAI_API_KEY", "Z_AI_API_KEY"]),
"kimi-coding": ("https://api.moonshot.ai/v1", ["KIMI_API_KEY"]),
"minimax": ("https://api.minimax.io/v1", ["MINIMAX_API_KEY"]),
"minimax-cn": ("https://api.minimaxi.com/v1", ["MINIMAX_CN_API_KEY"]),
}
# OAuth providers: provider → (resolver_import_path, api_mode)
# Each resolver returns {"api_key": ..., "base_url": ...}.
_FALLBACK_OAUTH_PROVIDERS = {
"openai-codex": ("resolve_codex_runtime_credentials", "codex_responses"),
"nous": ("resolve_nous_runtime_credentials", "chat_completions"),
}
def _resolve_fallback_credentials(
self, fb_provider: str, fb_config: dict
) -> Optional[tuple]:
"""Resolve credentials for a fallback provider.
Returns (api_key, base_url, api_mode) on success, or None on failure.
Handles three cases:
1. OAuth providers (openai-codex, nous) call credential resolver
2. API-key providers (openrouter, zai, etc.) read env var
3. Custom endpoints use base_url + api_key_env from config
"""
# ── 1. OAuth providers ────────────────────────────────────────
if fb_provider in self._FALLBACK_OAUTH_PROVIDERS:
resolver_name, api_mode = self._FALLBACK_OAUTH_PROVIDERS[fb_provider]
try:
import hermes_cli.auth as _auth
resolver = getattr(_auth, resolver_name)
creds = resolver()
return creds["api_key"], creds["base_url"], api_mode
except Exception as e:
logging.warning(
"Fallback to %s failed (credential resolution): %s",
fb_provider, e,
)
return None
# ── 2. API-key providers ──────────────────────────────────────
fb_key = (fb_config.get("api_key") or "").strip()
if not fb_key:
key_env = (fb_config.get("api_key_env") or "").strip()
if key_env:
fb_key = os.getenv(key_env, "")
elif fb_provider in self._FALLBACK_API_KEY_PROVIDERS:
for env_var in self._FALLBACK_API_KEY_PROVIDERS[fb_provider][1]:
fb_key = os.getenv(env_var, "")
if fb_key:
break
if not fb_key:
logging.warning(
"Fallback model configured but no API key found for provider '%s'",
fb_provider,
)
return None
# ── 3. Resolve base URL ───────────────────────────────────────
fb_base_url = (fb_config.get("base_url") or "").strip()
if not fb_base_url and fb_provider in self._FALLBACK_API_KEY_PROVIDERS:
fb_base_url = self._FALLBACK_API_KEY_PROVIDERS[fb_provider][0]
if not fb_base_url:
fb_base_url = OPENROUTER_BASE_URL
return fb_key, fb_base_url, "chat_completions"
def _try_activate_fallback(self) -> bool:
"""Switch to the configured fallback model/provider.
@ -2312,6 +2257,10 @@ class AIAgent:
OpenAI client, model slug, and provider in-place so the retry loop
can continue with the new backend. One-shot: returns False if
already activated or not configured.
Uses the centralized provider router (resolve_provider_client) for
auth resolution and client construction — no duplicated provider→key
mappings.
"""
if self._fallback_activated or not self._fallback_model:
return False
@ -2322,25 +2271,31 @@ class AIAgent:
if not fb_provider or not fb_model:
return False
resolved = self._resolve_fallback_credentials(fb_provider, fb)
if resolved is None:
return False
fb_key, fb_base_url, fb_api_mode = resolved
# Build new client
# Use centralized router for client construction.
# raw_codex=True because the main agent needs direct responses.stream()
# access for Codex providers.
try:
client_kwargs = {"api_key": fb_key, "base_url": fb_base_url}
if "openrouter" in fb_base_url.lower():
client_kwargs["default_headers"] = {
"HTTP-Referer": "https://github.com/NousResearch/hermes-agent",
"X-OpenRouter-Title": "Hermes Agent",
"X-OpenRouter-Categories": "productivity,cli-agent",
}
elif "api.kimi.com" in fb_base_url.lower():
client_kwargs["default_headers"] = {"User-Agent": "KimiCLI/1.0"}
from agent.auxiliary_client import resolve_provider_client
fb_client, _ = resolve_provider_client(
fb_provider, model=fb_model, raw_codex=True)
if fb_client is None:
logging.warning(
"Fallback to %s failed: provider not configured",
fb_provider)
return False
self.client = OpenAI(**client_kwargs)
self._client_kwargs = client_kwargs
# Determine api_mode from provider
fb_api_mode = "chat_completions"
if fb_provider == "openai-codex":
fb_api_mode = "codex_responses"
fb_base_url = str(fb_client.base_url)
# Swap client and config in-place
self.client = fb_client
self._client_kwargs = {
"api_key": fb_client.api_key,
"base_url": fb_base_url,
}
old_model = self.model
self.model = fb_model
self.provider = fb_provider

View file

@ -35,7 +35,7 @@ def _make_agent(fallback_model=None):
patch("run_agent.OpenAI"),
):
agent = AIAgent(
api_key="test-key-primary",
api_key="test-key",
quiet_mode=True,
skip_context_files=True,
skip_memory=True,
@ -45,6 +45,14 @@ def _make_agent(fallback_model=None):
return agent
def _mock_resolve(base_url="https://openrouter.ai/api/v1", api_key="test-key"):
"""Helper to create a mock client for resolve_provider_client."""
mock_client = MagicMock()
mock_client.api_key = api_key
mock_client.base_url = base_url
return mock_client
# =============================================================================
# _try_activate_fallback()
# =============================================================================
@ -71,9 +79,13 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "openrouter", "model": "anthropic/claude-sonnet-4"},
)
with (
patch.dict("os.environ", {"OPENROUTER_API_KEY": "sk-or-fallback-key"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="sk-or-fallback-key",
base_url="https://openrouter.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "anthropic/claude-sonnet-4"),
):
result = agent._try_activate_fallback()
assert result is True
@ -81,36 +93,37 @@ class TestTryActivateFallback:
assert agent.model == "anthropic/claude-sonnet-4"
assert agent.provider == "openrouter"
assert agent.api_mode == "chat_completions"
mock_openai.assert_called_once()
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "sk-or-fallback-key"
assert "openrouter" in call_kwargs["base_url"].lower()
# OpenRouter should get attribution headers
assert "default_headers" in call_kwargs
assert agent.client is mock_client
def test_activates_zai_fallback(self):
agent = _make_agent(
fallback_model={"provider": "zai", "model": "glm-5"},
)
with (
patch.dict("os.environ", {"ZAI_API_KEY": "sk-zai-key"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="sk-zai-key",
base_url="https://open.z.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "glm-5"),
):
result = agent._try_activate_fallback()
assert result is True
assert agent.model == "glm-5"
assert agent.provider == "zai"
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "sk-zai-key"
assert "z.ai" in call_kwargs["base_url"].lower()
assert agent.client is mock_client
def test_activates_kimi_fallback(self):
agent = _make_agent(
fallback_model={"provider": "kimi-coding", "model": "kimi-k2.5"},
)
with (
patch.dict("os.environ", {"KIMI_API_KEY": "sk-kimi-key"}),
patch("run_agent.OpenAI"),
mock_client = _mock_resolve(
api_key="sk-kimi-key",
base_url="https://api.moonshot.ai/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "kimi-k2.5"),
):
assert agent._try_activate_fallback() is True
assert agent.model == "kimi-k2.5"
@ -120,23 +133,30 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "minimax", "model": "MiniMax-M2.5"},
)
with (
patch.dict("os.environ", {"MINIMAX_API_KEY": "sk-mm-key"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="sk-mm-key",
base_url="https://api.minimax.io/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "MiniMax-M2.5"),
):
assert agent._try_activate_fallback() is True
assert agent.model == "MiniMax-M2.5"
assert agent.provider == "minimax"
call_kwargs = mock_openai.call_args[1]
assert "minimax.io" in call_kwargs["base_url"]
assert agent.client is mock_client
def test_only_fires_once(self):
agent = _make_agent(
fallback_model={"provider": "openrouter", "model": "anthropic/claude-sonnet-4"},
)
with (
patch.dict("os.environ", {"OPENROUTER_API_KEY": "sk-or-key"}),
patch("run_agent.OpenAI"),
mock_client = _mock_resolve(
api_key="sk-or-key",
base_url="https://openrouter.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "anthropic/claude-sonnet-4"),
):
assert agent._try_activate_fallback() is True
# Second attempt should return False
@ -147,9 +167,10 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "minimax", "model": "MiniMax-M2.5"},
)
# Ensure MINIMAX_API_KEY is not in the environment
env = {k: v for k, v in os.environ.items() if k != "MINIMAX_API_KEY"}
with patch.dict("os.environ", env, clear=True):
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(None, None),
):
assert agent._try_activate_fallback() is False
assert agent._fallback_activated is False
@ -163,22 +184,29 @@ class TestTryActivateFallback:
"api_key_env": "MY_CUSTOM_KEY",
},
)
with (
patch.dict("os.environ", {"MY_CUSTOM_KEY": "custom-secret"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="custom-secret",
base_url="http://localhost:8080/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "my-model"),
):
assert agent._try_activate_fallback() is True
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["base_url"] == "http://localhost:8080/v1"
assert call_kwargs["api_key"] == "custom-secret"
assert agent.client is mock_client
assert agent.model == "my-model"
def test_prompt_caching_enabled_for_claude_on_openrouter(self):
agent = _make_agent(
fallback_model={"provider": "openrouter", "model": "anthropic/claude-sonnet-4"},
)
with (
patch.dict("os.environ", {"OPENROUTER_API_KEY": "sk-or-key"}),
patch("run_agent.OpenAI"),
mock_client = _mock_resolve(
api_key="sk-or-key",
base_url="https://openrouter.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "anthropic/claude-sonnet-4"),
):
agent._try_activate_fallback()
assert agent._use_prompt_caching is True
@ -187,9 +215,13 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "openrouter", "model": "google/gemini-2.5-flash"},
)
with (
patch.dict("os.environ", {"OPENROUTER_API_KEY": "sk-or-key"}),
patch("run_agent.OpenAI"),
mock_client = _mock_resolve(
api_key="sk-or-key",
base_url="https://openrouter.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "google/gemini-2.5-flash"),
):
agent._try_activate_fallback()
assert agent._use_prompt_caching is False
@ -198,9 +230,13 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "zai", "model": "glm-5"},
)
with (
patch.dict("os.environ", {"ZAI_API_KEY": "sk-zai-key"}),
patch("run_agent.OpenAI"),
mock_client = _mock_resolve(
api_key="sk-zai-key",
base_url="https://open.z.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "glm-5"),
):
agent._try_activate_fallback()
assert agent._use_prompt_caching is False
@ -210,35 +246,36 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "zai", "model": "glm-5"},
)
with (
patch.dict("os.environ", {"Z_AI_API_KEY": "sk-alt-key"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="sk-alt-key",
base_url="https://open.z.ai/api/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "glm-5"),
):
assert agent._try_activate_fallback() is True
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "sk-alt-key"
assert agent.client is mock_client
def test_activates_codex_fallback(self):
"""OpenAI Codex fallback should use OAuth credentials and codex_responses mode."""
agent = _make_agent(
fallback_model={"provider": "openai-codex", "model": "gpt-5.3-codex"},
)
mock_creds = {
"api_key": "codex-oauth-token",
"base_url": "https://chatgpt.com/backend-api/codex",
}
with (
patch("hermes_cli.auth.resolve_codex_runtime_credentials", return_value=mock_creds),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="codex-oauth-token",
base_url="https://chatgpt.com/backend-api/codex",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "gpt-5.3-codex"),
):
result = agent._try_activate_fallback()
assert result is True
assert agent.model == "gpt-5.3-codex"
assert agent.provider == "openai-codex"
assert agent.api_mode == "codex_responses"
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "codex-oauth-token"
assert "chatgpt.com" in call_kwargs["base_url"]
assert agent.client is mock_client
def test_codex_fallback_fails_gracefully_without_credentials(self):
"""Codex fallback should return False if no OAuth credentials available."""
@ -246,8 +283,8 @@ class TestTryActivateFallback:
fallback_model={"provider": "openai-codex", "model": "gpt-5.3-codex"},
)
with patch(
"hermes_cli.auth.resolve_codex_runtime_credentials",
side_effect=Exception("No Codex credentials"),
"agent.auxiliary_client.resolve_provider_client",
return_value=(None, None),
):
assert agent._try_activate_fallback() is False
assert agent._fallback_activated is False
@ -257,22 +294,20 @@ class TestTryActivateFallback:
agent = _make_agent(
fallback_model={"provider": "nous", "model": "nous-hermes-3"},
)
mock_creds = {
"api_key": "nous-agent-key-abc",
"base_url": "https://inference-api.nousresearch.com/v1",
}
with (
patch("hermes_cli.auth.resolve_nous_runtime_credentials", return_value=mock_creds),
patch("run_agent.OpenAI") as mock_openai,
mock_client = _mock_resolve(
api_key="nous-agent-key-abc",
base_url="https://inference-api.nousresearch.com/v1",
)
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "nous-hermes-3"),
):
result = agent._try_activate_fallback()
assert result is True
assert agent.model == "nous-hermes-3"
assert agent.provider == "nous"
assert agent.api_mode == "chat_completions"
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "nous-agent-key-abc"
assert "nousresearch.com" in call_kwargs["base_url"]
assert agent.client is mock_client
def test_nous_fallback_fails_gracefully_without_login(self):
"""Nous fallback should return False if not logged in."""
@ -280,8 +315,8 @@ class TestTryActivateFallback:
fallback_model={"provider": "nous", "model": "nous-hermes-3"},
)
with patch(
"hermes_cli.auth.resolve_nous_runtime_credentials",
side_effect=Exception("Not logged in to Nous Portal"),
"agent.auxiliary_client.resolve_provider_client",
return_value=(None, None),
):
assert agent._try_activate_fallback() is False
assert agent._fallback_activated is False
@ -315,7 +350,7 @@ class TestFallbackInit:
# =============================================================================
class TestProviderCredentials:
"""Verify that each supported provider resolves its API key correctly."""
"""Verify that each supported provider resolves via the centralized router."""
@pytest.mark.parametrize("provider,env_var,base_url_fragment", [
("openrouter", "OPENROUTER_API_KEY", "openrouter"),
@ -328,12 +363,15 @@ class TestProviderCredentials:
agent = _make_agent(
fallback_model={"provider": provider, "model": "test-model"},
)
with (
patch.dict("os.environ", {env_var: "test-key-123"}),
patch("run_agent.OpenAI") as mock_openai,
mock_client = MagicMock()
mock_client.api_key = "test-api-key"
mock_client.base_url = f"https://{base_url_fragment}/v1"
with patch(
"agent.auxiliary_client.resolve_provider_client",
return_value=(mock_client, "test-model"),
):
result = agent._try_activate_fallback()
assert result is True, f"Failed to activate fallback for {provider}"
call_kwargs = mock_openai.call_args[1]
assert call_kwargs["api_key"] == "test-key-123"
assert base_url_fragment in call_kwargs["base_url"].lower()
assert agent.client is mock_client
assert agent.model == "test-model"
assert agent.provider == provider