mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
fix: warn and clear stale OPENAI_BASE_URL on provider switch (#5161)
This commit is contained in:
parent
d3c5d65563
commit
c89719ad9c
4 changed files with 222 additions and 1 deletions
|
|
@ -59,6 +59,9 @@ from hermes_constants import OPENROUTER_BASE_URL
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Module-level flag: only warn once per process about stale OPENAI_BASE_URL.
|
||||
_stale_base_url_warned = False
|
||||
|
||||
_PROVIDER_ALIASES = {
|
||||
"google": "gemini",
|
||||
"google-gemini": "gemini",
|
||||
|
|
@ -1133,9 +1136,28 @@ def _resolve_auto() -> Tuple[Optional[OpenAI], Optional[str]]:
|
|||
provider they already have credentials for — no OpenRouter key needed.
|
||||
2. OpenRouter → Nous → custom → Codex → API-key providers (original chain).
|
||||
"""
|
||||
global auxiliary_is_nous
|
||||
global auxiliary_is_nous, _stale_base_url_warned
|
||||
auxiliary_is_nous = False # Reset — _try_nous() will set True if it wins
|
||||
|
||||
# ── Warn once if OPENAI_BASE_URL is set but config.yaml uses a named
|
||||
# provider (not 'custom'). This catches the common "env poisoning"
|
||||
# scenario where a user switches providers via `hermes model` but the
|
||||
# old OPENAI_BASE_URL lingers in ~/.hermes/.env. ──
|
||||
if not _stale_base_url_warned:
|
||||
_env_base = os.getenv("OPENAI_BASE_URL", "").strip()
|
||||
_cfg_provider = _read_main_provider()
|
||||
if (_env_base and _cfg_provider
|
||||
and _cfg_provider != "custom"
|
||||
and not _cfg_provider.startswith("custom:")):
|
||||
logger.warning(
|
||||
"OPENAI_BASE_URL is set (%s) but model.provider is '%s'. "
|
||||
"Auxiliary clients may route to the wrong endpoint. "
|
||||
"Run: hermes model to reconfigure, or remove "
|
||||
"OPENAI_BASE_URL from ~/.hermes/.env",
|
||||
_env_base, _cfg_provider,
|
||||
)
|
||||
_stale_base_url_warned = True
|
||||
|
||||
# ── Step 1: non-aggregator main provider → use main model directly ──
|
||||
main_provider = _read_main_provider()
|
||||
main_model = _read_main_model()
|
||||
|
|
|
|||
|
|
@ -1080,6 +1080,42 @@ def select_provider_and_model(args=None):
|
|||
elif selected_provider in ("gemini", "zai", "minimax", "minimax-cn", "kilocode", "opencode-zen", "opencode-go", "ai-gateway", "alibaba", "huggingface"):
|
||||
_model_flow_api_key_provider(config, selected_provider, current_model)
|
||||
|
||||
# ── Post-switch cleanup: clear stale OPENAI_BASE_URL ──────────────
|
||||
# When the user switches to a named provider (anything except "custom"),
|
||||
# a leftover OPENAI_BASE_URL in ~/.hermes/.env can poison auxiliary
|
||||
# clients that use provider:auto. Clear it proactively. (#5161)
|
||||
if selected_provider not in ("custom", "cancel", "remove-custom") \
|
||||
and not selected_provider.startswith("custom:"):
|
||||
_clear_stale_openai_base_url()
|
||||
|
||||
|
||||
def _clear_stale_openai_base_url():
    """Remove OPENAI_BASE_URL from ~/.hermes/.env if the active provider is not 'custom'.

    After a provider switch, a leftover OPENAI_BASE_URL causes auxiliary
    clients (compression, vision, delegation) with provider:auto to route
    requests to the old custom endpoint instead of the newly selected
    provider. See issue #5161.
    """
    # Local import: avoids paying the config-module import cost at CLI startup.
    from hermes_cli.config import get_env_value, save_env_value, load_config

    cfg = load_config()
    model_cfg = cfg.get("model", {})
    if isinstance(model_cfg, dict):
        provider = (model_cfg.get("provider") or "").strip().lower()
    else:
        provider = ""

    # Skip when there is nothing to decide on (no provider configured) and
    # when the provider legitimately relies on a custom base URL: both the
    # plain 'custom' provider and named 'custom:<name>' profiles. The
    # 'custom:' check mirrors the call-site guard in select_provider_and_model
    # so this function stays safe if invoked standalone.
    if not provider or provider == "custom" or provider.startswith("custom:"):
        return  # custom provider legitimately uses OPENAI_BASE_URL

    stale_url = get_env_value("OPENAI_BASE_URL")
    if stale_url:
        save_env_value("OPENAI_BASE_URL", "")
        # Truncate long URLs so the console notice stays on one line.
        print(f"Cleared stale OPENAI_BASE_URL from .env (was: {stale_url[:40]}...)"
              if len(stale_url) > 40
              else f"Cleared stale OPENAI_BASE_URL from .env (was: {stale_url})")
|
||||
|
||||
|
||||
def _prompt_provider_choice(choices, *, default=0):
|
||||
"""Show provider selection menu with curses arrow-key navigation.
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
"""Tests for agent.auxiliary_client resolution chain, provider overrides, and model overrides."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock, AsyncMock
|
||||
|
|
@ -1472,3 +1473,90 @@ class TestAsyncCallLlmFallback:
|
|||
|
||||
assert result is fb_response
|
||||
mock_fb.assert_called_once_with("auto", "compression", reason="connection error")
|
||||
class TestStaleBaseUrlWarning:
    """_resolve_auto() warns when OPENAI_BASE_URL conflicts with config provider (#5161)."""

    def test_warns_when_openai_base_url_set_with_named_provider(self, monkeypatch, caplog):
        """Warning fires when OPENAI_BASE_URL is set but provider is a named provider."""
        import agent.auxiliary_client as aux
        monkeypatch.setenv("OPENROUTER_API_KEY", "sk-or-test")
        monkeypatch.setenv("OPENAI_BASE_URL", "http://localhost:11434/v1")
        # Re-arm the one-shot module flag so the warning can fire in this test.
        monkeypatch.setattr(aux, "_stale_base_url_warned", False)

        with patch("agent.auxiliary_client._read_main_provider", return_value="openrouter"), \
             patch("agent.auxiliary_client._read_main_model", return_value="google/gemini-flash"), \
             caplog.at_level(logging.WARNING, logger="agent.auxiliary_client"):
            _resolve_auto()

        messages = [record.message for record in caplog.records]
        assert any("OPENAI_BASE_URL is set" in message for message in messages), \
            "Expected a warning about stale OPENAI_BASE_URL"
        assert aux._stale_base_url_warned is True

    def test_no_warning_when_provider_is_custom(self, monkeypatch, caplog):
        """No warning when the provider is 'custom' — OPENAI_BASE_URL is expected."""
        import agent.auxiliary_client as aux
        monkeypatch.setenv("OPENAI_API_KEY", "test-key")
        monkeypatch.setenv("OPENAI_BASE_URL", "http://localhost:11434/v1")
        monkeypatch.setattr(aux, "_stale_base_url_warned", False)

        runtime = ("http://localhost:11434/v1", "test-key", None)
        with patch("agent.auxiliary_client._read_main_provider", return_value="custom"), \
             patch("agent.auxiliary_client._read_main_model", return_value="llama3"), \
             patch("agent.auxiliary_client._resolve_custom_runtime", return_value=runtime), \
             patch("agent.auxiliary_client.OpenAI", return_value=MagicMock()), \
             caplog.at_level(logging.WARNING, logger="agent.auxiliary_client"):
            _resolve_auto()

        messages = [record.message for record in caplog.records]
        assert not any("OPENAI_BASE_URL is set" in message for message in messages), \
            "Should NOT warn when provider is 'custom'"

    def test_no_warning_when_provider_is_named_custom(self, monkeypatch, caplog):
        """No warning when the provider is 'custom:myname' — base_url comes from config."""
        import agent.auxiliary_client as aux
        monkeypatch.setenv("OPENAI_API_KEY", "test-key")
        monkeypatch.setenv("OPENAI_BASE_URL", "http://localhost:11434/v1")
        monkeypatch.setattr(aux, "_stale_base_url_warned", False)

        fake_client = MagicMock()
        with patch("agent.auxiliary_client._read_main_provider", return_value="custom:ollama-local"), \
             patch("agent.auxiliary_client._read_main_model", return_value="llama3"), \
             patch("agent.auxiliary_client.resolve_provider_client",
                   return_value=(fake_client, "llama3")), \
             caplog.at_level(logging.WARNING, logger="agent.auxiliary_client"):
            _resolve_auto()

        messages = [record.message for record in caplog.records]
        assert not any("OPENAI_BASE_URL is set" in message for message in messages), \
            "Should NOT warn when provider is 'custom:*'"

    def test_no_warning_when_openai_base_url_not_set(self, monkeypatch, caplog):
        """No warning when OPENAI_BASE_URL is absent."""
        import agent.auxiliary_client as aux
        monkeypatch.setenv("OPENROUTER_API_KEY", "sk-or-test")
        monkeypatch.delenv("OPENAI_BASE_URL", raising=False)
        monkeypatch.setattr(aux, "_stale_base_url_warned", False)

        with patch("agent.auxiliary_client._read_main_provider", return_value="openrouter"), \
             patch("agent.auxiliary_client._read_main_model", return_value="google/gemini-flash"), \
             caplog.at_level(logging.WARNING, logger="agent.auxiliary_client"):
            _resolve_auto()

        messages = [record.message for record in caplog.records]
        assert not any("OPENAI_BASE_URL is set" in message for message in messages), \
            "Should NOT warn when OPENAI_BASE_URL is not set"

    def test_warning_only_fires_once(self, monkeypatch, caplog):
        """Warning is suppressed after the first invocation."""
        import agent.auxiliary_client as aux
        monkeypatch.setenv("OPENROUTER_API_KEY", "sk-or-test")
        monkeypatch.setenv("OPENAI_BASE_URL", "http://localhost:11434/v1")
        monkeypatch.setattr(aux, "_stale_base_url_warned", False)

        with patch("agent.auxiliary_client._read_main_provider", return_value="openrouter"), \
             patch("agent.auxiliary_client._read_main_model", return_value="google/gemini-flash"), \
             caplog.at_level(logging.WARNING, logger="agent.auxiliary_client"):
            # First call warns and flips the flag; only the second call's
            # records matter after the clear().
            _resolve_auto()
            caplog.clear()
            _resolve_auto()

        messages = [record.message for record in caplog.records]
        assert not any("OPENAI_BASE_URL is set" in message for message in messages), \
            "Warning should not fire a second time"
|
||||
|
|
|
|||
75
tests/hermes_cli/test_clear_stale_base_url.py
Normal file
75
tests/hermes_cli/test_clear_stale_base_url.py
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
"""Tests for _clear_stale_openai_base_url() cleanup after provider switch (#5161)."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from unittest.mock import patch
|
||||
|
||||
from hermes_cli.config import load_config, save_config, save_env_value, get_env_value
|
||||
|
||||
|
||||
def _write_provider(provider: str, model: str = "test-model"):
    """Helper: persist the given provider + model pair into config.yaml."""
    config = load_config()
    section = config.get("model", {})
    # Guard against a malformed config where 'model' is not a mapping.
    if not isinstance(section, dict):
        section = {}
    section["provider"] = provider
    section["default"] = model
    config["model"] = section
    save_config(config)
|
||||
|
||||
|
||||
class TestClearStaleOpenaiBaseUrl:
    """_clear_stale_openai_base_url() removes OPENAI_BASE_URL when provider is not custom."""

    def test_clears_when_provider_is_named(self, monkeypatch):
        """OPENAI_BASE_URL is cleared when config provider is a named provider."""
        from hermes_cli.main import _clear_stale_openai_base_url

        save_env_value("OPENAI_BASE_URL", "http://localhost:11434/v1")
        _write_provider("openrouter")

        _clear_stale_openai_base_url()

        result = get_env_value("OPENAI_BASE_URL")
        assert not result, f"Expected OPENAI_BASE_URL to be cleared, got: {result!r}"

    def test_preserves_when_provider_is_custom(self, monkeypatch):
        """OPENAI_BASE_URL is NOT cleared when config provider is 'custom'."""
        from hermes_cli.main import _clear_stale_openai_base_url

        save_env_value("OPENAI_BASE_URL", "http://localhost:11434/v1")
        _write_provider("custom")

        _clear_stale_openai_base_url()

        result = get_env_value("OPENAI_BASE_URL")
        assert result == "http://localhost:11434/v1", \
            f"Expected OPENAI_BASE_URL to be preserved, got: {result!r}"

    def test_noop_when_no_openai_base_url(self, monkeypatch):
        """No error when OPENAI_BASE_URL is not set."""
        from hermes_cli.main import _clear_stale_openai_base_url

        _write_provider("openrouter")
        # Make sure the variable is absent from both .env and the process env.
        save_env_value("OPENAI_BASE_URL", "")
        monkeypatch.delenv("OPENAI_BASE_URL", raising=False)

        _clear_stale_openai_base_url()  # must not raise

    def test_noop_when_provider_empty(self, monkeypatch):
        """No cleanup when provider is not set in config."""
        from hermes_cli.main import _clear_stale_openai_base_url

        config = load_config()
        config.pop("model", None)
        save_config(config)
        save_env_value("OPENAI_BASE_URL", "http://localhost:11434/v1")

        _clear_stale_openai_base_url()

        result = get_env_value("OPENAI_BASE_URL")
        assert result == "http://localhost:11434/v1", \
            "Should not clear when provider is not configured"
|
||||
Loading…
Add table
Add a link
Reference in a new issue