mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
fix: fall back to provider's default model when model config is empty (#8303)
When a user configures a provider (e.g. `hermes auth add openai-codex`) but never selects a model via `hermes model`, the gateway and CLI would pass an empty model string to the API, causing the error: 'Codex Responses request model must be a non-empty string'. Now both the gateway (_resolve_session_agent_runtime) and the CLI (_ensure_runtime_credentials) detect an empty model and fill it from the provider's first catalog entry in _PROVIDER_MODELS. This covers all providers that have a static model list (openai-codex, anthropic, gemini, copilot, etc.). The fix is conservative: it only triggers when the model is truly empty and a known provider was resolved. Explicit model choices are never overridden.
This commit is contained in:
parent
17c72f176d
commit
45e60904c6
4 changed files with 167 additions and 0 deletions
|
|
@@ -900,6 +900,23 @@ class GatewayRunner:
|
|||
model, runtime_kwargs = self._apply_session_model_override(
|
||||
resolved_session_key, model, runtime_kwargs
|
||||
)
|
||||
|
||||
# When the config has no model.default but a provider was resolved
|
||||
# (e.g. user ran `hermes auth add openai-codex` without `hermes model`),
|
||||
# fall back to the provider's first catalog model so the API call
|
||||
# doesn't fail with "model must be a non-empty string".
|
||||
if not model and runtime_kwargs.get("provider"):
|
||||
try:
|
||||
from hermes_cli.models import get_default_model_for_provider
|
||||
model = get_default_model_for_provider(runtime_kwargs["provider"])
|
||||
if model:
|
||||
logger.info(
|
||||
"No model configured — defaulting to %s for provider %s",
|
||||
model, runtime_kwargs["provider"],
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return model, runtime_kwargs
|
||||
|
||||
def _resolve_turn_agent_config(self, user_message: str, model: str, runtime_kwargs: dict) -> dict:
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue