mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
feat(codex): add gpt-5.5 and wire live model discovery into picker (#14720)
OpenAI launched GPT-5.5 on Codex today (Apr 23 2026). This commit adds it to
the static catalog and pipes the user's OAuth access token into the openai-codex path of
provider_model_ids() so /model mid-session and the gateway picker hit the
live ChatGPT codex/models endpoint — new models appear for each user
according to what ChatGPT actually lists for their account, without a Hermes
release.
Verified live: 'gpt-5.5' returns priority 0 (featured) from the endpoint,
and has a 400k context window per OpenAI's launch article. 'hermes chat --provider
openai-codex --model gpt-5.5' completes end-to-end.
Changes:
- hermes_cli/codex_models.py: add gpt-5.5 to DEFAULT_CODEX_MODELS + forward-compat
- agent/model_metadata.py: 400k context length entry
- hermes_cli/models.py: resolve codex OAuth token before calling
get_codex_model_ids() in provider_model_ids('openai-codex')
This commit is contained in:
parent
b6ca3c28dc
commit
8f5fee3e3e
3 changed files with 18 additions and 1 deletions
|
|
@ -123,6 +123,9 @@ DEFAULT_CONTEXT_LENGTHS = {
|
||||||
"claude": 200000,
|
"claude": 200000,
|
||||||
# OpenAI — GPT-5 family (most have 400k; specific overrides first)
|
# OpenAI — GPT-5 family (most have 400k; specific overrides first)
|
||||||
# Source: https://developers.openai.com/api/docs/models
|
# Source: https://developers.openai.com/api/docs/models
|
||||||
|
# GPT-5.5 (launched Apr 23 2026). Verified via live ChatGPT codex/models
|
||||||
|
# endpoint: bare slug `gpt-5.5`, no -pro/-mini variants. 400k context on Codex.
|
||||||
|
"gpt-5.5": 400000,
|
||||||
"gpt-5.4-nano": 400000, # 400k (not 1.05M like full 5.4)
|
"gpt-5.4-nano": 400000, # 400k (not 1.05M like full 5.4)
|
||||||
"gpt-5.4-mini": 400000, # 400k (not 1.05M like full 5.4)
|
"gpt-5.4-mini": 400000, # 400k (not 1.05M like full 5.4)
|
||||||
"gpt-5.4": 1050000, # GPT-5.4, GPT-5.4 Pro (1.05M context)
|
"gpt-5.4": 1050000, # GPT-5.4, GPT-5.4 Pro (1.05M context)
|
||||||
|
|
|
||||||
|
|
@ -12,6 +12,7 @@ import os
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
DEFAULT_CODEX_MODELS: List[str] = [
|
DEFAULT_CODEX_MODELS: List[str] = [
|
||||||
|
"gpt-5.5",
|
||||||
"gpt-5.4-mini",
|
"gpt-5.4-mini",
|
||||||
"gpt-5.4",
|
"gpt-5.4",
|
||||||
"gpt-5.3-codex",
|
"gpt-5.3-codex",
|
||||||
|
|
@ -21,6 +22,7 @@ DEFAULT_CODEX_MODELS: List[str] = [
|
||||||
]
|
]
|
||||||
|
|
||||||
_FORWARD_COMPAT_TEMPLATE_MODELS: List[tuple[str, tuple[str, ...]]] = [
|
_FORWARD_COMPAT_TEMPLATE_MODELS: List[tuple[str, tuple[str, ...]]] = [
|
||||||
|
("gpt-5.5", ("gpt-5.4", "gpt-5.4-mini", "gpt-5.3-codex")),
|
||||||
("gpt-5.4-mini", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
("gpt-5.4-mini", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
||||||
("gpt-5.4", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
("gpt-5.4", ("gpt-5.3-codex", "gpt-5.2-codex")),
|
||||||
("gpt-5.3-codex", ("gpt-5.2-codex",)),
|
("gpt-5.3-codex", ("gpt-5.2-codex",)),
|
||||||
|
|
|
||||||
|
|
@ -1678,7 +1678,19 @@ def provider_model_ids(provider: Optional[str], *, force_refresh: bool = False)
|
||||||
if normalized == "openai-codex":
|
if normalized == "openai-codex":
|
||||||
from hermes_cli.codex_models import get_codex_model_ids
|
from hermes_cli.codex_models import get_codex_model_ids
|
||||||
|
|
||||||
return get_codex_model_ids()
|
# Pass the live OAuth access token so the picker matches whatever
|
||||||
|
# ChatGPT lists for this account right now (new models appear without
|
||||||
|
# a Hermes release). Falls back to the hardcoded catalog if no token
|
||||||
|
# or the endpoint is unreachable.
|
||||||
|
access_token = None
|
||||||
|
try:
|
||||||
|
from hermes_cli.auth import resolve_codex_runtime_credentials
|
||||||
|
|
||||||
|
creds = resolve_codex_runtime_credentials(refresh_if_expiring=True)
|
||||||
|
access_token = creds.get("api_key")
|
||||||
|
except Exception:
|
||||||
|
access_token = None
|
||||||
|
return get_codex_model_ids(access_token=access_token)
|
||||||
if normalized in {"copilot", "copilot-acp"}:
|
if normalized in {"copilot", "copilot-acp"}:
|
||||||
try:
|
try:
|
||||||
live = _fetch_github_models(_resolve_copilot_catalog_api_key())
|
live = _fetch_github_models(_resolve_copilot_catalog_api_key())
|
||||||
|
|
|
||||||
Loading…
Add table
Add a link
Reference in a new issue