fix(model): preserve custom endpoint credentials and accept cloud models not in /v1/models

When switching models on a custom endpoint (ollama-launch):
- Same-provider switches no longer re-resolve credentials (fixes base_url
  being lost for 'custom' provider on subsequent switches)
- Named providers (ollama-launch) are resolved via user_providers so
  switch_model can find their base_url from config
- Models not in the /v1/models probe but present in the user's saved
  provider config are accepted with a warning instead of being rejected
- CLI /model and TUI /model both pass user_providers/custom_providers
  to switch_model so the config model list is available for validation

Closes #15088
This commit is contained in:
kshitijk4poor 2026-04-25 14:10:42 +05:30
parent e5647d7863
commit 0d3d2a2631
4 changed files with 56 additions and 24 deletions

18
cli.py
View file

@ -5270,24 +5270,22 @@ class HermesCLI:
# Parse --provider and --global flags
model_input, explicit_provider, persist_global = parse_model_flags(raw_args)
# Load providers for switch_model (picker path needs them below)
user_provs = None
custom_provs = None
try:
from hermes_cli.config import get_compatible_custom_providers, load_config
cfg = load_config()
user_provs = cfg.get("providers")
custom_provs = get_compatible_custom_providers(cfg)
except Exception:
pass
# No args at all: open prompt_toolkit-native picker modal
if not model_input and not explicit_provider:
model_display = self.model or "unknown"
provider_display = get_label(self.provider) if self.provider else "unknown"
user_provs = None
custom_provs = None
try:
from hermes_cli.config import get_compatible_custom_providers, load_config
cfg = load_config()
user_provs = cfg.get("providers")
custom_provs = get_compatible_custom_providers(cfg)
except Exception:
pass
try:
providers = list_authenticated_providers(
current_provider=self.provider or "",