diff --git a/hermes_cli/codex_models.py b/hermes_cli/codex_models.py
index 9fe3467145..169c63e8ac 100644
--- a/hermes_cli/codex_models.py
+++ b/hermes_cli/codex_models.py
@@ -18,6 +18,36 @@ DEFAULT_CODEX_MODELS: List[str] = [
     "gpt-5.1-codex-mini",
 ]
 
+_FORWARD_COMPAT_TEMPLATE_MODELS: List[tuple[str, tuple[str, ...]]] = [
+    ("gpt-5.3-codex", ("gpt-5.2-codex",)),
+    ("gpt-5.4", ("gpt-5.3-codex", "gpt-5.2-codex")),
+    ("gpt-5.3-codex-spark", ("gpt-5.3-codex", "gpt-5.2-codex")),
+]
+
+
+def _add_forward_compat_models(model_ids: List[str]) -> List[str]:
+    """Add Clawdbot-style synthetic forward-compat Codex models.
+
+    If a newer Codex slug isn't returned by live discovery, surface it when an
+    older compatible template model is present. This mirrors Clawdbot's
+    synthetic catalog / forward-compat behavior for GPT-5 Codex variants.
+    """
+    ordered: List[str] = []
+    seen: set[str] = set()
+    for model_id in model_ids:
+        if model_id not in seen:
+            ordered.append(model_id)
+            seen.add(model_id)
+
+    for synthetic_model, template_models in _FORWARD_COMPAT_TEMPLATE_MODELS:
+        if synthetic_model in seen:
+            continue
+        if any(template in seen for template in template_models):
+            ordered.append(synthetic_model)
+            seen.add(synthetic_model)
+
+    return ordered
+
 def _fetch_models_from_api(access_token: str) -> List[str]:
     """Fetch available models from the Codex API.
     Returns visible models sorted by priority."""
@@ -54,7 +84,7 @@ def _fetch_models_from_api(access_token: str) -> List[str]:
             sortable.append((rank, slug))
 
     sortable.sort(key=lambda x: (x[0], x[1]))
-    return [slug for _, slug in sortable]
+    return _add_forward_compat_models([slug for _, slug in sortable])
 
 
 def _read_default_model(codex_home: Path) -> Optional[str]:
@@ -125,7 +155,7 @@ def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
     if access_token:
         api_models = _fetch_models_from_api(access_token)
         if api_models:
-            return api_models
+            return _add_forward_compat_models(api_models)
 
     # Fall back to local sources
     default_model = _read_default_model(codex_home)
@@ -140,4 +170,4 @@ def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
         if model_id not in ordered:
             ordered.append(model_id)
 
-    return ordered
+    return _add_forward_compat_models(ordered)
diff --git a/tests/test_codex_models.py b/tests/test_codex_models.py
index 7148c659f9..32fe631535 100644
--- a/tests/test_codex_models.py
+++ b/tests/test_codex_models.py
@@ -52,6 +52,19 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatc
     models = get_codex_model_ids()
 
     assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS
+    assert "gpt-5.4" in models
+    assert "gpt-5.3-codex-spark" in models
+
+
+def test_get_codex_model_ids_adds_forward_compat_models_from_templates(monkeypatch):
+    monkeypatch.setattr(
+        "hermes_cli.codex_models._fetch_models_from_api",
+        lambda access_token: ["gpt-5.2-codex"],
+    )
+
+    models = get_codex_model_ids(access_token="codex-access-token")
+
+    assert models == ["gpt-5.2-codex", "gpt-5.3-codex", "gpt-5.4", "gpt-5.3-codex-spark"]
 
 
 def test_model_command_uses_runtime_access_token_for_codex_list(monkeypatch):