diff --git a/hermes_cli/codex_models.py b/hermes_cli/codex_models.py index 9fe346714..43722124a 100644 --- a/hermes_cli/codex_models.py +++ b/hermes_cli/codex_models.py @@ -13,6 +13,7 @@ logger = logging.getLogger(__name__) DEFAULT_CODEX_MODELS: List[str] = [ "gpt-5.3-codex", + "gpt-5.4", "gpt-5.2-codex", "gpt-5.1-codex-max", "gpt-5.1-codex-mini", diff --git a/hermes_cli/models.py b/hermes_cli/models.py index 3b3d0ab4d..85c248c1b 100644 --- a/hermes_cli/models.py +++ b/hermes_cli/models.py @@ -40,6 +40,8 @@ _PROVIDER_MODELS: dict[str, list[str]] = { "deepseek-v3.2", ], "openai-codex": [ + "gpt-5.3-codex", + "gpt-5.4", "gpt-5.2-codex", "gpt-5.1-codex-mini", "gpt-5.1-codex-max", diff --git a/hermes_cli/setup.py b/hermes_cli/setup.py index b2e53c87d..789f2b096 100644 --- a/hermes_cli/setup.py +++ b/hermes_cli/setup.py @@ -654,6 +654,7 @@ def setup_model_provider(config: dict): _update_config_for_provider, _login_openai_codex, get_codex_auth_status, + resolve_codex_runtime_credentials, DEFAULT_CODEX_BASE_URL, detect_external_credentials, ) @@ -1266,7 +1267,14 @@ def setup_model_provider(config: dict): elif selected_provider == "openai-codex": from hermes_cli.codex_models import get_codex_model_ids - codex_models = get_codex_model_ids() + codex_token = None + try: + codex_creds = resolve_codex_runtime_credentials() + codex_token = codex_creds.get("api_key") + except Exception as exc: + logger.debug("Could not resolve Codex runtime credentials for model list: %s", exc) + + codex_models = get_codex_model_ids(access_token=codex_token) model_choices = codex_models + [f"Keep current ({current_model})"] default_codex = 0 if current_model in codex_models: diff --git a/tests/hermes_cli/test_setup.py b/tests/hermes_cli/test_setup.py index 54a82e4b5..7e2443abb 100644 --- a/tests/hermes_cli/test_setup.py +++ b/tests/hermes_cli/test_setup.py @@ -95,3 +95,50 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch): assert reloaded["model"]["provider"] == "custom" 
assert reloaded["model"]["base_url"] == "https://custom.example/v1" assert reloaded["model"]["default"] == "custom/model" + + +def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch): + monkeypatch.setenv("HERMES_HOME", str(tmp_path)) + # OPENROUTER_API_KEY is set below, after _clear_provider_env wipes provider vars + _clear_provider_env(monkeypatch) + monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key") + + config = load_config() + + prompt_choices = iter([1, 0]) + monkeypatch.setattr( + "hermes_cli.setup.prompt_choice", + lambda *args, **kwargs: next(prompt_choices), + ) + monkeypatch.setattr("hermes_cli.setup.prompt", lambda *args, **kwargs: "") + monkeypatch.setattr("hermes_cli.auth.detect_external_credentials", lambda: []) + monkeypatch.setattr("hermes_cli.auth._login_openai_codex", lambda *args, **kwargs: None) + monkeypatch.setattr( + "hermes_cli.auth.resolve_codex_runtime_credentials", + lambda *args, **kwargs: { + "base_url": "https://chatgpt.com/backend-api/codex", + "api_key": "codex-access-token", + }, + ) + + captured = {} + + def _fake_get_codex_model_ids(access_token=None): + captured["access_token"] = access_token + return ["gpt-5.4", "gpt-5.3-codex"] + + monkeypatch.setattr( + "hermes_cli.codex_models.get_codex_model_ids", + _fake_get_codex_model_ids, + ) + + setup_model_provider(config) + save_config(config) + + reloaded = load_config() + + assert captured["access_token"] == "codex-access-token" + assert isinstance(reloaded["model"], dict) + assert reloaded["model"]["provider"] == "openai-codex" + assert reloaded["model"]["default"] == "gpt-5.4" + assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex"