diff --git a/tests/test_tui_gateway_server.py b/tests/test_tui_gateway_server.py index 1daf77bd2f..43a368ce81 100644 --- a/tests/test_tui_gateway_server.py +++ b/tests/test_tui_gateway_server.py @@ -85,11 +85,21 @@ def test_status_callback_accepts_single_message_argument(): def test_resolve_model_uses_inference_model_env(monkeypatch): monkeypatch.delenv("HERMES_MODEL", raising=False) - monkeypatch.setenv("HERMES_INFERENCE_MODEL", "anthropic/claude-sonnet-4.6") + monkeypatch.setenv("HERMES_INFERENCE_MODEL", " anthropic/claude-sonnet-4.6\n") assert server._resolve_model() == "anthropic/claude-sonnet-4.6" +def test_resolve_model_strips_config_model(monkeypatch): + monkeypatch.delenv("HERMES_MODEL", raising=False) + monkeypatch.delenv("HERMES_INFERENCE_MODEL", raising=False) + monkeypatch.setattr( + server, "_load_cfg", lambda: {"model": {"default": " nous/hermes-test "}} + ) + + assert server._resolve_model() == "nous/hermes-test" + + def test_startup_runtime_uses_tui_provider_env(monkeypatch): monkeypatch.setenv("HERMES_MODEL", "nous/hermes-test") monkeypatch.setenv("HERMES_TUI_PROVIDER", "nous") @@ -457,6 +467,55 @@ def test_config_set_model_syncs_inference_provider_env(monkeypatch): assert os.environ["HERMES_INFERENCE_PROVIDER"] == "anthropic" +def test_config_set_model_syncs_tui_provider_env(monkeypatch): + class Agent: + model = "gpt-5.3-codex" + provider = "openai-codex" + base_url = "" + api_key = "" + + def switch_model(self, **kwargs): + self.model = kwargs["new_model"] + self.provider = kwargs["new_provider"] + + agent = Agent() + server._sessions["sid"] = _session(agent=agent) + monkeypatch.setenv("HERMES_TUI_PROVIDER", "openai-codex") + monkeypatch.setattr(server, "_restart_slash_worker", lambda session: None) + monkeypatch.setattr(server, "_emit", lambda *args, **kwargs: None) + + def fake_switch_model(**kwargs): + return types.SimpleNamespace( + success=True, + new_model="anthropic/claude-sonnet-4.6", + target_provider="anthropic", + api_key="key", + 
+            base_url="https://api.anthropic.com", +            api_mode="anthropic_messages", +            warning_message="", +        ) + +    monkeypatch.setattr("hermes_cli.model_switch.switch_model", fake_switch_model) + +    try: +        resp = server.handle_request( +            { +                "id": "1", +                "method": "config.set", +                "params": { +                    "session_id": "sid", +                    "key": "model", +                    "value": "anthropic/claude-sonnet-4.6 --provider anthropic", +                }, +            } +        ) + +        assert resp["result"]["value"] == "anthropic/claude-sonnet-4.6" +        assert os.environ["HERMES_TUI_PROVIDER"] == "anthropic" +    finally: +        server._sessions.clear() + + def test_config_set_personality_rejects_unknown_name(monkeypatch):     monkeypatch.setattr(         server, diff --git a/tui_gateway/server.py b/tui_gateway/server.py index fa01a68fb4..9f188de645 100644 --- a/tui_gateway/server.py +++ b/tui_gateway/server.py @@ -560,14 +560,17 @@ def resolve_skin() -> dict: def _resolve_model() -> str: -    env = os.environ.get("HERMES_MODEL", "") or os.environ.get("HERMES_INFERENCE_MODEL", "") +    env = ( +        os.environ.get("HERMES_MODEL", "") +        or os.environ.get("HERMES_INFERENCE_MODEL", "") +    ).strip()     if env:         return env     m = _load_cfg().get("model", "")     if isinstance(m, dict): -        return m.get("default", "") +        return str(m.get("default", "") or "").strip()     if isinstance(m, str) and m: -        return m +        return m.strip()     return "anthropic/claude-sonnet-4" @@ -773,6 +776,8 @@ def _apply_model_switch(sid: str, session: dict, raw_input: str) -> dict:     # original one persisted in config or env.     if result.target_provider:         os.environ["HERMES_INFERENCE_PROVIDER"] = result.target_provider +        if os.environ.get("HERMES_TUI_PROVIDER"): +            os.environ["HERMES_TUI_PROVIDER"] = result.target_provider     if persist_global:         _persist_model_switch(result)     return {"value": result.new_model, "warning": result.warning_message or ""}