mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
Merge pull request #15337 from NousResearch/bb/tui-kawaii-default-off
fix(tui): keep default personality neutral
This commit is contained in:
commit
a5129c72ef
2 changed files with 39 additions and 27 deletions
|
|
@ -54,6 +54,44 @@ def test_make_agent_passes_resolved_provider():
|
|||
assert call_kwargs.kwargs["api_mode"] == "anthropic_messages"
|
||||
|
||||
|
||||
def test_make_agent_ignores_display_personality_without_system_prompt():
    """The TUI matches the classic CLI: personality only becomes active once
    it has been saved to agent.system_prompt."""
    # Runtime resolution stub: a plain chat-completions provider, no
    # subprocess command and no credential pool.
    runtime_stub = {
        "provider": "openrouter",
        "base_url": "https://api.synthetic.new/v1",
        "api_key": "sk-test",
        "api_mode": "chat_completions",
        "command": None,
        "args": None,
        "credential_pool": None,
    }
    # Config stub: a personality is *selected* for display, but the saved
    # system prompt is empty — the agent must not pick the personality up.
    cfg_stub = {
        "agent": {
            "system_prompt": "",
            "personalities": {"kawaii": "sparkle system prompt"},
        },
        "display": {"personality": "kawaii"},
        "model": {"default": "glm-5"},
    }

    with patch("tui_gateway.server._load_cfg", return_value=cfg_stub), \
         patch("tui_gateway.server._get_db", return_value=MagicMock()), \
         patch(
             "hermes_cli.runtime_provider.resolve_runtime_provider",
             return_value=runtime_stub,
         ), \
         patch("run_agent.AIAgent") as agent_cls:
        from tui_gateway.server import _make_agent

        _make_agent("sid-default-personality", "key-default-personality")

    # With no saved system prompt, no ephemeral prompt may be injected.
    assert agent_cls.call_args.kwargs["ephemeral_system_prompt"] is None
|
||||
|
||||
|
||||
def test_probe_config_health_flags_null_sections():
|
||||
"""Bare YAML keys (`agent:` with no value) parse as None and silently
|
||||
drop nested settings; probe must surface them so users can fix."""
|
||||
|
|
|
|||
|
|
@ -1143,30 +1143,6 @@ def _wire_callbacks(sid: str):
|
|||
set_secret_capture_callback(secret_cb)
|
||||
|
||||
|
||||
def _resolve_personality_prompt(cfg: dict) -> str:
|
||||
"""Resolve the active personality into a system prompt string."""
|
||||
name = ((cfg.get("display") or {}).get("personality", "") or "").strip().lower()
|
||||
if not name or name in ("default", "none", "neutral"):
|
||||
return ""
|
||||
try:
|
||||
from cli import load_cli_config
|
||||
|
||||
personalities = (load_cli_config().get("agent") or {}).get("personalities", {}) or {}
|
||||
except Exception:
|
||||
try:
|
||||
from hermes_cli.config import load_config as _load_full_cfg
|
||||
|
||||
personalities = (
|
||||
(_load_full_cfg().get("agent") or {}).get("personalities", {}) or {}
|
||||
)
|
||||
except Exception:
|
||||
personalities = (cfg.get("agent") or {}).get("personalities", {}) or {}
|
||||
pval = personalities.get(name)
|
||||
if pval is None:
|
||||
return ""
|
||||
return _render_personality_prompt(pval)
|
||||
|
||||
|
||||
def _render_personality_prompt(value) -> str:
|
||||
if isinstance(value, dict):
|
||||
parts = [value.get("system_prompt", "")]
|
||||
|
|
@ -1300,9 +1276,7 @@ def _make_agent(sid: str, key: str, session_id: str | None = None):
|
|||
from hermes_cli.runtime_provider import resolve_runtime_provider
|
||||
|
||||
cfg = _load_cfg()
|
||||
system_prompt = (cfg.get("agent") or {}).get("system_prompt", "") or ""
|
||||
if not system_prompt:
|
||||
system_prompt = _resolve_personality_prompt(cfg)
|
||||
system_prompt = ((cfg.get("agent") or {}).get("system_prompt", "") or "").strip()
|
||||
runtime = resolve_runtime_provider(requested=None)
|
||||
return AIAgent(
|
||||
model=_resolve_model(),
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue