fix(config): accept 'model' key as alias for 'default' in model config (#3603)

Users intuitively write model: { model: my-model } instead of
model: { default: my-model } and it silently falls back to the
hardcoded default. Now both spellings work across all three config
consumers: runtime_provider, CLI, and gateway.

Co-authored-by: ygd58 <ygd58@users.noreply.github.com>
This commit is contained in:
Teknium 2026-03-28 14:55:27 -07:00 committed by GitHub
parent 9a364f2805
commit e4480ff426
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
5 changed files with 11 additions and 3 deletions

View file

@@ -7,6 +7,7 @@
# ============================================================================= # =============================================================================
model: model:
# Default model to use (can be overridden with --model flag) # Default model to use (can be overridden with --model flag)
# Both "default" and "model" work as the key name here.
default: "anthropic/claude-opus-4.6" default: "anthropic/claude-opus-4.6"
# Inference provider selection: # Inference provider selection:

2
cli.py
View file

@@ -1078,7 +1078,7 @@ class HermesCLI:
# authoritative. This avoids conflicts in multi-agent setups where # authoritative. This avoids conflicts in multi-agent setups where
# env vars would stomp each other. # env vars would stomp each other.
_model_config = CLI_CONFIG.get("model", {}) _model_config = CLI_CONFIG.get("model", {})
_config_model = _model_config.get("default", "") if isinstance(_model_config, dict) else (_model_config or "") _config_model = (_model_config.get("default") or _model_config.get("model") or "") if isinstance(_model_config, dict) else (_model_config or "")
_FALLBACK_MODEL = "anthropic/claude-opus-4.6" _FALLBACK_MODEL = "anthropic/claude-opus-4.6"
self.model = model or _config_model or _FALLBACK_MODEL self.model = model or _config_model or _FALLBACK_MODEL
# Auto-detect model from local server if still on fallback # Auto-detect model from local server if still on fallback

View file

@@ -288,7 +288,7 @@ def _resolve_gateway_model(config: dict | None = None) -> str:
if isinstance(model_cfg, str): if isinstance(model_cfg, str):
model = model_cfg model = model_cfg
elif isinstance(model_cfg, dict): elif isinstance(model_cfg, dict):
model = model_cfg.get("default", model) model = model_cfg.get("default") or model_cfg.get("model") or model
return model return model
@@ -2093,7 +2093,7 @@ class GatewayRunner:
if isinstance(_model_cfg, str): if isinstance(_model_cfg, str):
_hyg_model = _model_cfg _hyg_model = _model_cfg
elif isinstance(_model_cfg, dict): elif isinstance(_model_cfg, dict):
_hyg_model = _model_cfg.get("default", _hyg_model) _hyg_model = _model_cfg.get("default") or _model_cfg.get("model") or _hyg_model
# Read explicit context_length override from model config # Read explicit context_length override from model config
# (same as run_agent.py lines 995-1005) # (same as run_agent.py lines 995-1005)
_raw_ctx = _model_cfg.get("context_length") _raw_ctx = _model_cfg.get("context_length")

View file

@@ -63,6 +63,9 @@ def _get_model_config() -> Dict[str, Any]:
model_cfg = config.get("model") model_cfg = config.get("model")
if isinstance(model_cfg, dict): if isinstance(model_cfg, dict):
cfg = dict(model_cfg) cfg = dict(model_cfg)
# Accept "model" as alias for "default" (users intuitively write model.model)
if not cfg.get("default") and cfg.get("model"):
cfg["default"] = cfg["model"]
default = (cfg.get("default") or "").strip() default = (cfg.get("default") or "").strip()
base_url = (cfg.get("base_url") or "").strip() base_url = (cfg.get("base_url") or "").strip()
is_local = "localhost" in base_url or "127.0.0.1" in base_url is_local = "localhost" in base_url or "127.0.0.1" in base_url

View file

@@ -95,6 +95,10 @@ You need at least one way to connect to an LLM. Use `hermes model` to switch pro
| **Hugging Face** | `HF_TOKEN` in `~/.hermes/.env` (provider: `huggingface`, aliases: `hf`) | | **Hugging Face** | `HF_TOKEN` in `~/.hermes/.env` (provider: `huggingface`, aliases: `hf`) |
| **Custom Endpoint** | `hermes model` (saved in `config.yaml`) or `OPENAI_BASE_URL` + `OPENAI_API_KEY` in `~/.hermes/.env` | | **Custom Endpoint** | `hermes model` (saved in `config.yaml`) or `OPENAI_BASE_URL` + `OPENAI_API_KEY` in `~/.hermes/.env` |
:::tip Model key alias
In the `model:` config section, you can use either `default:` or `model:` as the key name for your model ID. Both `model: { default: my-model }` and `model: { model: my-model }` work identically.
:::
:::info Codex Note :::info Codex Note
The OpenAI Codex provider authenticates via device code (open a URL, enter a code). Hermes stores the resulting credentials in its own auth store under `~/.hermes/auth.json` and can import existing Codex CLI credentials from `~/.codex/auth.json` when present. No Codex CLI installation is required. The OpenAI Codex provider authenticates via device code (open a URL, enter a code). Hermes stores the resulting credentials in its own auth store under `~/.hermes/auth.json` and can import existing Codex CLI credentials from `~/.codex/auth.json` when present. No Codex CLI installation is required.
::: :::