mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
Show configured model and provider in status output
Made-with: Cursor
This commit is contained in:
parent
95d49401ee
commit
c2c37ef158
3 changed files with 109 additions and 2 deletions
|
|
@ -223,6 +223,16 @@ def normalize_provider(provider: Optional[str]) -> str:
|
|||
return _PROVIDER_ALIASES.get(normalized, normalized)
|
||||
|
||||
|
||||
def provider_label(provider: Optional[str]) -> str:
    """Return a human-friendly label for a provider id or alias.

    Defaults to OpenRouter when no provider is given, and reports
    "Auto" for the special auto-selection value before normalization.
    """
    raw = (provider or "openrouter").strip()
    lowered = raw.lower()
    if lowered == "auto":
        return "Auto"
    # Map aliases onto their canonical provider id before the label lookup.
    canonical = normalize_provider(lowered)
    return _PROVIDER_LABELS.get(canonical, raw or "OpenRouter")
|
||||
|
||||
|
||||
def provider_model_ids(provider: Optional[str]) -> list[str]:
|
||||
"""Return the best known model catalog for a provider.
|
||||
|
||||
|
|
|
|||
|
|
@ -11,8 +11,11 @@ from pathlib import Path
|
|||
|
||||
PROJECT_ROOT = Path(__file__).parent.parent.resolve()
|
||||
|
||||
from hermes_cli.auth import AuthError, resolve_provider
|
||||
from hermes_cli.colors import Colors, color
|
||||
from hermes_cli.config import get_env_path, get_env_value, get_hermes_home
|
||||
from hermes_cli.config import get_env_path, get_env_value, get_hermes_home, load_config
|
||||
from hermes_cli.models import provider_label
|
||||
from hermes_cli.runtime_provider import resolve_requested_provider
|
||||
from hermes_constants import OPENROUTER_MODELS_URL
|
||||
|
||||
def check_mark(ok: bool) -> str:
|
||||
|
|
@ -48,6 +51,32 @@ def _format_iso_timestamp(value) -> str:
|
|||
return parsed.astimezone().strftime("%Y-%m-%d %H:%M:%S %Z")
|
||||
|
||||
|
||||
def _configured_model_label(config: dict) -> str:
|
||||
"""Return the configured default model from config.yaml."""
|
||||
model_cfg = config.get("model")
|
||||
if isinstance(model_cfg, dict):
|
||||
model = (model_cfg.get("default") or model_cfg.get("name") or "").strip()
|
||||
elif isinstance(model_cfg, str):
|
||||
model = model_cfg.strip()
|
||||
else:
|
||||
model = ""
|
||||
return model or "(not set)"
|
||||
|
||||
|
||||
def _effective_provider_label() -> str:
    """Return the provider label matching current CLI runtime resolution.

    Mirrors the CLI's provider selection: resolve the requested
    provider, fall back to the request (or "auto") when auth resolution
    fails, and report a custom endpoint when OpenRouter is shadowed by
    an OPENAI_BASE_URL override.
    """
    wanted = resolve_requested_provider()
    try:
        resolved = resolve_provider(wanted)
    except AuthError:
        # No usable credentials: report what was asked for instead.
        resolved = wanted or "auto"

    # An explicit OPENAI_BASE_URL means a custom endpoint, not OpenRouter.
    if resolved == "openrouter" and get_env_value("OPENAI_BASE_URL"):
        resolved = "custom"

    return provider_label(resolved)
||||
def show_status(args):
|
||||
"""Show status of all Hermes Agent components."""
|
||||
show_all = getattr(args, 'all', False)
|
||||
|
|
@ -68,6 +97,14 @@ def show_status(args):
|
|||
|
||||
env_path = get_env_path()
|
||||
print(f" .env file: {check_mark(env_path.exists())} {'exists' if env_path.exists() else 'not found'}")
|
||||
|
||||
try:
|
||||
config = load_config()
|
||||
except Exception:
|
||||
config = {}
|
||||
|
||||
print(f" Model: {_configured_model_label(config)}")
|
||||
print(f" Provider: {_effective_provider_label()}")
|
||||
|
||||
# =========================================================================
|
||||
# API Keys
|
||||
|
|
@ -181,7 +218,6 @@ def show_status(args):
|
|||
# Fall back to config file value when env var isn't set
|
||||
# (hermes status doesn't go through cli.py's config loading)
|
||||
try:
|
||||
from hermes_cli.config import load_config
|
||||
_cfg = load_config()
|
||||
terminal_env = _cfg.get("terminal", {}).get("backend", "local")
|
||||
except Exception:
|
||||
|
|
|
|||
61
tests/hermes_cli/test_status_model_provider.py
Normal file
61
tests/hermes_cli/test_status_model_provider.py
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
"""Tests for hermes_cli.status model/provider display."""
|
||||
|
||||
from types import SimpleNamespace
|
||||
|
||||
|
||||
def _patch_common_status_deps(monkeypatch, status_mod, tmp_path, *, openai_base_url=""):
    """Stub out status's filesystem, env, auth, and subprocess dependencies.

    Points paths at tmp_path, makes get_env_value return only the
    supplied OPENAI_BASE_URL, neutralizes both auth status probes, and
    fakes subprocess.run as an "inactive" systemd query.
    """
    import hermes_cli.auth as auth_mod

    def _fake_env_value(name: str):
        return openai_base_url if name == "OPENAI_BASE_URL" else ""

    def _fake_run(*args, **kwargs):
        return SimpleNamespace(stdout="inactive\n", returncode=3)

    monkeypatch.setattr(status_mod, "get_env_path", lambda: tmp_path / ".env", raising=False)
    monkeypatch.setattr(status_mod, "get_hermes_home", lambda: tmp_path, raising=False)
    monkeypatch.setattr(status_mod, "get_env_value", _fake_env_value, raising=False)
    monkeypatch.setattr(auth_mod, "get_nous_auth_status", lambda: {}, raising=False)
    monkeypatch.setattr(auth_mod, "get_codex_auth_status", lambda: {}, raising=False)
    monkeypatch.setattr(status_mod.subprocess, "run", _fake_run)
||||
def test_show_status_displays_configured_dict_model_and_provider_label(monkeypatch, capsys, tmp_path):
    """Dict-style model config and a resolved provider both appear in status output."""
    from hermes_cli import status as status_mod

    _patch_common_status_deps(monkeypatch, status_mod, tmp_path)
    fake_config = {"model": {"default": "anthropic/claude-sonnet-4", "provider": "anthropic"}}
    monkeypatch.setattr(status_mod, "load_config", lambda: fake_config, raising=False)
    monkeypatch.setattr(status_mod, "resolve_requested_provider", lambda requested=None: "anthropic", raising=False)
    monkeypatch.setattr(status_mod, "resolve_provider", lambda requested=None, **kwargs: "anthropic", raising=False)
    monkeypatch.setattr(status_mod, "provider_label", lambda provider: "Anthropic", raising=False)

    status_mod.show_status(SimpleNamespace(all=False, deep=False))

    captured = capsys.readouterr().out
    assert "Model: anthropic/claude-sonnet-4" in captured
    assert "Provider: Anthropic" in captured
||||
def test_show_status_displays_legacy_string_model_and_custom_endpoint(monkeypatch, capsys, tmp_path):
    """Legacy string model config plus an OPENAI_BASE_URL override report a custom endpoint."""
    from hermes_cli import status as status_mod

    _patch_common_status_deps(monkeypatch, status_mod, tmp_path, openai_base_url="http://localhost:8080/v1")
    monkeypatch.setattr(status_mod, "load_config", lambda: {"model": "qwen3:latest"}, raising=False)
    monkeypatch.setattr(status_mod, "resolve_requested_provider", lambda requested=None: "auto", raising=False)
    monkeypatch.setattr(status_mod, "resolve_provider", lambda requested=None, **kwargs: "openrouter", raising=False)

    def _fake_label(provider):
        return "Custom endpoint" if provider == "custom" else provider

    monkeypatch.setattr(status_mod, "provider_label", _fake_label, raising=False)

    status_mod.show_status(SimpleNamespace(all=False, deep=False))

    captured = capsys.readouterr().out
    assert "Model: qwen3:latest" in captured
    assert "Provider: Custom endpoint" in captured
||||
Loading…
Add table
Add a link
Reference in a new issue