mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-29 01:31:41 +00:00
Merge branch 'main' of github.com:NousResearch/hermes-agent into feat/ink-refactor
This commit is contained in:
commit
7e4dd6ea02
220 changed files with 23482 additions and 1959 deletions
|
|
@ -23,9 +23,9 @@ from hermes_cli.auth import (
|
|||
get_auth_status,
|
||||
AuthError,
|
||||
KIMI_CODE_BASE_URL,
|
||||
_try_gh_cli_token,
|
||||
_resolve_kimi_base_url,
|
||||
)
|
||||
from hermes_cli.copilot_auth import _try_gh_cli_token
|
||||
|
||||
|
||||
# =============================================================================
|
||||
|
|
@ -68,7 +68,7 @@ class TestProviderRegistry:
|
|||
def test_copilot_env_vars(self):
|
||||
pconfig = PROVIDER_REGISTRY["copilot"]
|
||||
assert pconfig.api_key_env_vars == ("COPILOT_GITHUB_TOKEN", "GH_TOKEN", "GITHUB_TOKEN")
|
||||
assert pconfig.base_url_env_var == ""
|
||||
assert pconfig.base_url_env_var == "COPILOT_API_BASE_URL"
|
||||
|
||||
def test_kimi_env_vars(self):
|
||||
pconfig = PROVIDER_REGISTRY["kimi-coding"]
|
||||
|
|
@ -381,13 +381,13 @@ class TestResolveApiKeyProviderCredentials:
|
|||
assert creds["source"] == "gh auth token"
|
||||
|
||||
def test_try_gh_cli_token_uses_homebrew_path_when_not_on_path(self, monkeypatch):
|
||||
monkeypatch.setattr("hermes_cli.auth.shutil.which", lambda command: None)
|
||||
monkeypatch.setattr("hermes_cli.copilot_auth.shutil.which", lambda command: None)
|
||||
monkeypatch.setattr(
|
||||
"hermes_cli.auth.os.path.isfile",
|
||||
"hermes_cli.copilot_auth.os.path.isfile",
|
||||
lambda path: path == "/opt/homebrew/bin/gh",
|
||||
)
|
||||
monkeypatch.setattr(
|
||||
"hermes_cli.auth.os.access",
|
||||
"hermes_cli.copilot_auth.os.access",
|
||||
lambda path, mode: path == "/opt/homebrew/bin/gh" and mode == os.X_OK,
|
||||
)
|
||||
|
||||
|
|
@ -397,11 +397,11 @@ class TestResolveApiKeyProviderCredentials:
|
|||
returncode = 0
|
||||
stdout = "gh-cli-secret\n"
|
||||
|
||||
def _fake_run(cmd, capture_output, text, timeout):
|
||||
def _fake_run(cmd, **kwargs):
|
||||
calls.append(cmd)
|
||||
return _Result()
|
||||
|
||||
monkeypatch.setattr("hermes_cli.auth.subprocess.run", _fake_run)
|
||||
monkeypatch.setattr("hermes_cli.copilot_auth.subprocess.run", _fake_run)
|
||||
|
||||
assert _try_gh_cli_token() == "gh-cli-secret"
|
||||
assert calls == [["/opt/homebrew/bin/gh", "auth", "token"]]
|
||||
|
|
|
|||
|
|
@ -1,6 +1,8 @@
|
|||
"""Tests for hermes backup and import commands."""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sqlite3
|
||||
import zipfile
|
||||
from argparse import Namespace
|
||||
from pathlib import Path
|
||||
|
|
@ -232,6 +234,44 @@ class TestBackup:
|
|||
assert len(zips) == 1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _validate_backup_zip tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestValidateBackupZip:
|
||||
def _make_zip(self, zip_path: Path, filenames: list[str]) -> None:
|
||||
with zipfile.ZipFile(zip_path, "w") as zf:
|
||||
for name in filenames:
|
||||
zf.writestr(name, "dummy")
|
||||
|
||||
def test_state_db_passes(self, tmp_path):
|
||||
"""A zip containing state.db is accepted as a valid Hermes backup."""
|
||||
from hermes_cli.backup import _validate_backup_zip
|
||||
zip_path = tmp_path / "backup.zip"
|
||||
self._make_zip(zip_path, ["state.db", "sessions/abc.json"])
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
ok, reason = _validate_backup_zip(zf)
|
||||
assert ok, reason
|
||||
|
||||
def test_old_wrong_db_name_fails(self, tmp_path):
|
||||
"""A zip with only hermes_state.db (old wrong name) is rejected."""
|
||||
from hermes_cli.backup import _validate_backup_zip
|
||||
zip_path = tmp_path / "old.zip"
|
||||
self._make_zip(zip_path, ["hermes_state.db", "memory_store.db"])
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
ok, reason = _validate_backup_zip(zf)
|
||||
assert not ok
|
||||
|
||||
def test_config_yaml_passes(self, tmp_path):
|
||||
"""A zip containing config.yaml is accepted (existing behaviour preserved)."""
|
||||
from hermes_cli.backup import _validate_backup_zip
|
||||
zip_path = tmp_path / "backup.zip"
|
||||
self._make_zip(zip_path, ["config.yaml", "skills/x/SKILL.md"])
|
||||
with zipfile.ZipFile(zip_path, "r") as zf:
|
||||
ok, reason = _validate_backup_zip(zf)
|
||||
assert ok, reason
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Import tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
|
@ -895,3 +935,181 @@ class TestProfileRestoration:
|
|||
|
||||
# Files should still be restored even if wrappers can't be created
|
||||
assert (hermes_home / "profiles" / "coder" / "config.yaml").exists()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SQLite safe copy tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestSafeCopyDb:
|
||||
def test_copies_valid_database(self, tmp_path):
|
||||
from hermes_cli.backup import _safe_copy_db
|
||||
src = tmp_path / "test.db"
|
||||
dst = tmp_path / "copy.db"
|
||||
|
||||
conn = sqlite3.connect(str(src))
|
||||
conn.execute("CREATE TABLE t (x INTEGER)")
|
||||
conn.execute("INSERT INTO t VALUES (42)")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
result = _safe_copy_db(src, dst)
|
||||
assert result is True
|
||||
|
||||
conn = sqlite3.connect(str(dst))
|
||||
rows = conn.execute("SELECT x FROM t").fetchall()
|
||||
conn.close()
|
||||
assert rows == [(42,)]
|
||||
|
||||
def test_copies_wal_mode_database(self, tmp_path):
|
||||
from hermes_cli.backup import _safe_copy_db
|
||||
src = tmp_path / "wal.db"
|
||||
dst = tmp_path / "copy.db"
|
||||
|
||||
conn = sqlite3.connect(str(src))
|
||||
conn.execute("PRAGMA journal_mode=WAL")
|
||||
conn.execute("CREATE TABLE t (x TEXT)")
|
||||
conn.execute("INSERT INTO t VALUES ('wal-test')")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
result = _safe_copy_db(src, dst)
|
||||
assert result is True
|
||||
|
||||
conn = sqlite3.connect(str(dst))
|
||||
rows = conn.execute("SELECT x FROM t").fetchall()
|
||||
conn.close()
|
||||
assert rows == [("wal-test",)]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Quick state snapshot tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestQuickSnapshot:
|
||||
@pytest.fixture
|
||||
def hermes_home(self, tmp_path):
|
||||
"""Create a fake HERMES_HOME with critical state files."""
|
||||
home = tmp_path / ".hermes"
|
||||
home.mkdir()
|
||||
(home / "config.yaml").write_text("model:\n provider: openrouter\n")
|
||||
(home / ".env").write_text("OPENROUTER_API_KEY=test-key-123\n")
|
||||
(home / "auth.json").write_text('{"providers": {}}\n')
|
||||
(home / "cron").mkdir()
|
||||
(home / "cron" / "jobs.json").write_text('{"jobs": []}\n')
|
||||
|
||||
# Real SQLite database
|
||||
db_path = home / "state.db"
|
||||
conn = sqlite3.connect(str(db_path))
|
||||
conn.execute("CREATE TABLE sessions (id TEXT PRIMARY KEY, data TEXT)")
|
||||
conn.execute("INSERT INTO sessions VALUES ('s1', 'hello world')")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
return home
|
||||
|
||||
def test_creates_snapshot(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
assert snap_id is not None
|
||||
snap_dir = hermes_home / "state-snapshots" / snap_id
|
||||
assert snap_dir.is_dir()
|
||||
assert (snap_dir / "manifest.json").exists()
|
||||
|
||||
def test_label_in_id(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
snap_id = create_quick_snapshot(label="before-upgrade", hermes_home=hermes_home)
|
||||
assert "before-upgrade" in snap_id
|
||||
|
||||
def test_state_db_safely_copied(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
db_copy = hermes_home / "state-snapshots" / snap_id / "state.db"
|
||||
assert db_copy.exists()
|
||||
|
||||
conn = sqlite3.connect(str(db_copy))
|
||||
rows = conn.execute("SELECT * FROM sessions").fetchall()
|
||||
conn.close()
|
||||
assert len(rows) == 1
|
||||
assert rows[0] == ("s1", "hello world")
|
||||
|
||||
def test_copies_nested_files(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
assert (hermes_home / "state-snapshots" / snap_id / "cron" / "jobs.json").exists()
|
||||
|
||||
def test_missing_files_skipped(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
with open(hermes_home / "state-snapshots" / snap_id / "manifest.json") as f:
|
||||
meta = json.load(f)
|
||||
# gateway_state.json etc. don't exist in fixture
|
||||
assert "gateway_state.json" not in meta["files"]
|
||||
|
||||
def test_empty_home_returns_none(self, tmp_path):
|
||||
from hermes_cli.backup import create_quick_snapshot
|
||||
empty = tmp_path / "empty"
|
||||
empty.mkdir()
|
||||
assert create_quick_snapshot(hermes_home=empty) is None
|
||||
|
||||
def test_list_snapshots(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, list_quick_snapshots
|
||||
id1 = create_quick_snapshot(label="first", hermes_home=hermes_home)
|
||||
id2 = create_quick_snapshot(label="second", hermes_home=hermes_home)
|
||||
|
||||
snaps = list_quick_snapshots(hermes_home=hermes_home)
|
||||
assert len(snaps) == 2
|
||||
assert snaps[0]["id"] == id2 # most recent first
|
||||
assert snaps[1]["id"] == id1
|
||||
|
||||
def test_list_limit(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, list_quick_snapshots
|
||||
for i in range(5):
|
||||
create_quick_snapshot(label=f"s{i}", hermes_home=hermes_home)
|
||||
snaps = list_quick_snapshots(limit=3, hermes_home=hermes_home)
|
||||
assert len(snaps) == 3
|
||||
|
||||
def test_restore_config(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, restore_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
|
||||
(hermes_home / "config.yaml").write_text("model:\n provider: anthropic\n")
|
||||
assert "anthropic" in (hermes_home / "config.yaml").read_text()
|
||||
|
||||
result = restore_quick_snapshot(snap_id, hermes_home=hermes_home)
|
||||
assert result is True
|
||||
assert "openrouter" in (hermes_home / "config.yaml").read_text()
|
||||
|
||||
def test_restore_state_db(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, restore_quick_snapshot
|
||||
snap_id = create_quick_snapshot(hermes_home=hermes_home)
|
||||
|
||||
conn = sqlite3.connect(str(hermes_home / "state.db"))
|
||||
conn.execute("INSERT INTO sessions VALUES ('s2', 'new')")
|
||||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
restore_quick_snapshot(snap_id, hermes_home=hermes_home)
|
||||
|
||||
conn = sqlite3.connect(str(hermes_home / "state.db"))
|
||||
rows = conn.execute("SELECT * FROM sessions").fetchall()
|
||||
conn.close()
|
||||
assert len(rows) == 1
|
||||
|
||||
def test_restore_nonexistent(self, hermes_home):
|
||||
from hermes_cli.backup import restore_quick_snapshot
|
||||
assert restore_quick_snapshot("nonexistent", hermes_home=hermes_home) is False
|
||||
|
||||
def test_auto_prune(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, list_quick_snapshots, _QUICK_DEFAULT_KEEP
|
||||
for i in range(_QUICK_DEFAULT_KEEP + 5):
|
||||
create_quick_snapshot(label=f"snap-{i:03d}", hermes_home=hermes_home)
|
||||
snaps = list_quick_snapshots(limit=100, hermes_home=hermes_home)
|
||||
assert len(snaps) <= _QUICK_DEFAULT_KEEP
|
||||
|
||||
def test_manual_prune(self, hermes_home):
|
||||
from hermes_cli.backup import create_quick_snapshot, prune_quick_snapshots, list_quick_snapshots
|
||||
for i in range(10):
|
||||
create_quick_snapshot(label=f"s{i}", hermes_home=hermes_home)
|
||||
deleted = prune_quick_snapshots(keep=3, hermes_home=hermes_home)
|
||||
assert deleted == 7
|
||||
assert len(list_quick_snapshots(hermes_home=hermes_home)) == 3
|
||||
|
|
|
|||
|
|
@ -1,6 +1,7 @@
|
|||
"""Tests for hermes claw commands."""
|
||||
|
||||
from argparse import Namespace
|
||||
import subprocess
|
||||
from types import ModuleType
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
|
|
@ -197,6 +198,11 @@ class TestClawCommand:
|
|||
class TestCmdMigrate:
|
||||
"""Test the migrate command handler."""
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def _mock_openclaw_running(self):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=[]):
|
||||
yield
|
||||
|
||||
def test_error_when_source_missing(self, tmp_path, capsys):
|
||||
args = Namespace(
|
||||
source=str(tmp_path / "nonexistent"),
|
||||
|
|
@ -626,3 +632,120 @@ class TestPrintMigrationReport:
|
|||
claw_mod._print_migration_report(report, dry_run=False)
|
||||
captured = capsys.readouterr()
|
||||
assert "Nothing to migrate" in captured.out
|
||||
|
||||
|
||||
class TestDetectOpenclawProcesses:
|
||||
def test_returns_match_when_pgrep_finds_openclaw(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "linux"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
# systemd check misses, pgrep finds openclaw
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=1, stdout=""), # systemctl
|
||||
MagicMock(returncode=0, stdout="1234\n"), # pgrep
|
||||
]
|
||||
mock_subprocess.TimeoutExpired = subprocess.TimeoutExpired
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert len(result) == 1
|
||||
assert "1234" in result[0]
|
||||
|
||||
def test_returns_empty_when_pgrep_finds_nothing(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "darwin"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=1, stdout=""), # systemctl (not found)
|
||||
MagicMock(returncode=1, stdout=""), # pgrep
|
||||
]
|
||||
mock_subprocess.TimeoutExpired = subprocess.TimeoutExpired
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert result == []
|
||||
|
||||
def test_detects_systemd_service(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "linux"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=0, stdout="active\n"), # systemctl
|
||||
MagicMock(returncode=1, stdout=""), # pgrep
|
||||
]
|
||||
mock_subprocess.TimeoutExpired = subprocess.TimeoutExpired
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert len(result) == 1
|
||||
assert "systemd" in result[0]
|
||||
|
||||
def test_returns_match_on_windows_when_openclaw_exe_running(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "win32"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=0, stdout="openclaw.exe 1234 Console 1 45,056 K\n"),
|
||||
]
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert len(result) >= 1
|
||||
assert any("openclaw.exe" in r for r in result)
|
||||
|
||||
def test_returns_match_on_windows_when_node_exe_has_openclaw_in_cmdline(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "win32"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=0, stdout=""), # tasklist openclaw.exe
|
||||
MagicMock(returncode=0, stdout=""), # tasklist clawd.exe
|
||||
MagicMock(returncode=0, stdout="1234\n"), # PowerShell
|
||||
]
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert len(result) >= 1
|
||||
assert any("node.exe" in r for r in result)
|
||||
|
||||
def test_returns_empty_on_windows_when_nothing_found(self):
|
||||
with patch.object(claw_mod, "sys") as mock_sys:
|
||||
mock_sys.platform = "win32"
|
||||
with patch.object(claw_mod, "subprocess") as mock_subprocess:
|
||||
mock_subprocess.run.side_effect = [
|
||||
MagicMock(returncode=0, stdout=""),
|
||||
MagicMock(returncode=0, stdout=""),
|
||||
MagicMock(returncode=0, stdout=""),
|
||||
]
|
||||
result = claw_mod._detect_openclaw_processes()
|
||||
assert result == []
|
||||
|
||||
|
||||
class TestWarnIfOpenclawRunning:
|
||||
def test_noop_when_not_running(self, capsys):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=[]):
|
||||
claw_mod._warn_if_openclaw_running(auto_yes=False)
|
||||
captured = capsys.readouterr()
|
||||
assert captured.out == ""
|
||||
|
||||
def test_warns_and_exits_when_running_and_user_declines(self, capsys):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=["openclaw process(es) (PIDs: 1234)"]):
|
||||
with patch.object(claw_mod, "prompt_yes_no", return_value=False):
|
||||
with patch.object(claw_mod.sys.stdin, "isatty", return_value=True):
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
claw_mod._warn_if_openclaw_running(auto_yes=False)
|
||||
assert exc_info.value.code == 0
|
||||
captured = capsys.readouterr()
|
||||
assert "OpenClaw appears to be running" in captured.out
|
||||
|
||||
def test_warns_and_continues_when_running_and_user_accepts(self, capsys):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=["openclaw process(es) (PIDs: 1234)"]):
|
||||
with patch.object(claw_mod, "prompt_yes_no", return_value=True):
|
||||
with patch.object(claw_mod.sys.stdin, "isatty", return_value=True):
|
||||
claw_mod._warn_if_openclaw_running(auto_yes=False)
|
||||
captured = capsys.readouterr()
|
||||
assert "OpenClaw appears to be running" in captured.out
|
||||
|
||||
def test_warns_and_continues_in_auto_yes_mode(self, capsys):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=["openclaw process(es) (PIDs: 1234)"]):
|
||||
claw_mod._warn_if_openclaw_running(auto_yes=True)
|
||||
captured = capsys.readouterr()
|
||||
assert "OpenClaw appears to be running" in captured.out
|
||||
|
||||
def test_warns_and_continues_in_non_interactive_session(self, capsys):
|
||||
with patch.object(claw_mod, "_detect_openclaw_processes", return_value=["openclaw process(es) (PIDs: 1234)"]):
|
||||
with patch.object(claw_mod.sys.stdin, "isatty", return_value=False):
|
||||
claw_mod._warn_if_openclaw_running(auto_yes=False)
|
||||
captured = capsys.readouterr()
|
||||
assert "OpenClaw appears to be running" in captured.out
|
||||
assert "Non-interactive session" in captured.out
|
||||
|
|
|
|||
|
|
@ -10,6 +10,7 @@ from hermes_cli.config import (
|
|||
DEFAULT_CONFIG,
|
||||
get_hermes_home,
|
||||
ensure_hermes_home,
|
||||
get_compatible_custom_providers,
|
||||
load_config,
|
||||
load_env,
|
||||
migrate_config,
|
||||
|
|
@ -424,6 +425,146 @@ class TestAnthropicTokenMigration:
|
|||
assert load_env().get("ANTHROPIC_TOKEN") == "current-token"
|
||||
|
||||
|
||||
class TestCustomProviderCompatibility:
|
||||
"""Custom provider compatibility across legacy and v12+ config schemas."""
|
||||
|
||||
def test_v11_upgrade_moves_custom_providers_into_providers(self, tmp_path):
|
||||
config_path = tmp_path / "config.yaml"
|
||||
config_path.write_text(
|
||||
yaml.safe_dump(
|
||||
{
|
||||
"_config_version": 11,
|
||||
"model": {
|
||||
"default": "openai/gpt-5.4",
|
||||
"provider": "openrouter",
|
||||
},
|
||||
"custom_providers": [
|
||||
{
|
||||
"name": "OpenAI Direct",
|
||||
"base_url": "https://api.openai.com/v1",
|
||||
"api_key": "test-key",
|
||||
"api_mode": "codex_responses",
|
||||
"model": "gpt-5-mini",
|
||||
}
|
||||
],
|
||||
"fallback_providers": [
|
||||
{"provider": "openai-direct", "model": "gpt-5-mini"}
|
||||
],
|
||||
}
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
with patch.dict(os.environ, {"HERMES_HOME": str(tmp_path)}):
|
||||
migrate_config(interactive=False, quiet=True)
|
||||
raw = yaml.safe_load(config_path.read_text(encoding="utf-8"))
|
||||
|
||||
assert raw["_config_version"] == 17
|
||||
assert raw["providers"]["openai-direct"] == {
|
||||
"api": "https://api.openai.com/v1",
|
||||
"api_key": "test-key",
|
||||
"default_model": "gpt-5-mini",
|
||||
"name": "OpenAI Direct",
|
||||
"transport": "codex_responses",
|
||||
}
|
||||
# custom_providers removed by migration — runtime reads via compat layer
|
||||
assert "custom_providers" not in raw
|
||||
|
||||
def test_providers_dict_resolves_at_runtime(self, tmp_path):
|
||||
"""After migration deleted custom_providers, get_compatible_custom_providers
|
||||
still finds entries from the providers dict."""
|
||||
config_path = tmp_path / "config.yaml"
|
||||
config_path.write_text(
|
||||
yaml.safe_dump(
|
||||
{
|
||||
"_config_version": 17,
|
||||
"providers": {
|
||||
"openai-direct": {
|
||||
"api": "https://api.openai.com/v1",
|
||||
"api_key": "test-key",
|
||||
"default_model": "gpt-5-mini",
|
||||
"name": "OpenAI Direct",
|
||||
"transport": "codex_responses",
|
||||
}
|
||||
},
|
||||
}
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
with patch.dict(os.environ, {"HERMES_HOME": str(tmp_path)}):
|
||||
compatible = get_compatible_custom_providers()
|
||||
|
||||
assert len(compatible) == 1
|
||||
assert compatible[0]["name"] == "OpenAI Direct"
|
||||
assert compatible[0]["base_url"] == "https://api.openai.com/v1"
|
||||
assert compatible[0]["provider_key"] == "openai-direct"
|
||||
assert compatible[0]["api_mode"] == "codex_responses"
|
||||
|
||||
def test_compatible_custom_providers_prefers_api_then_url_then_base_url(self, tmp_path):
|
||||
config_path = tmp_path / "config.yaml"
|
||||
config_path.write_text(
|
||||
yaml.safe_dump(
|
||||
{
|
||||
"_config_version": 17,
|
||||
"providers": {
|
||||
"my-provider": {
|
||||
"name": "My Provider",
|
||||
"api": "https://api.example.com/v1",
|
||||
"url": "https://url.example.com/v1",
|
||||
"base_url": "https://base.example.com/v1",
|
||||
}
|
||||
},
|
||||
}
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
with patch.dict(os.environ, {"HERMES_HOME": str(tmp_path)}):
|
||||
compatible = get_compatible_custom_providers()
|
||||
|
||||
assert compatible == [
|
||||
{
|
||||
"name": "My Provider",
|
||||
"base_url": "https://api.example.com/v1",
|
||||
"provider_key": "my-provider",
|
||||
}
|
||||
]
|
||||
|
||||
def test_dedup_across_legacy_and_providers(self, tmp_path):
|
||||
"""Same name+url in both schemas should not produce duplicates."""
|
||||
config_path = tmp_path / "config.yaml"
|
||||
config_path.write_text(
|
||||
yaml.safe_dump(
|
||||
{
|
||||
"_config_version": 17,
|
||||
"custom_providers": [
|
||||
{
|
||||
"name": "OpenAI Direct",
|
||||
"base_url": "https://api.openai.com/v1",
|
||||
"api_key": "legacy-key",
|
||||
}
|
||||
],
|
||||
"providers": {
|
||||
"openai-direct": {
|
||||
"api": "https://api.openai.com/v1",
|
||||
"api_key": "new-key",
|
||||
"name": "OpenAI Direct",
|
||||
}
|
||||
},
|
||||
}
|
||||
),
|
||||
encoding="utf-8",
|
||||
)
|
||||
|
||||
with patch.dict(os.environ, {"HERMES_HOME": str(tmp_path)}):
|
||||
compatible = get_compatible_custom_providers()
|
||||
|
||||
assert len(compatible) == 1
|
||||
# Legacy entry wins (read first)
|
||||
assert compatible[0]["api_key"] == "legacy-key"
|
||||
|
||||
|
||||
class TestInterimAssistantMessageConfig:
|
||||
"""Test the explicit gateway interim-message config gate."""
|
||||
|
||||
|
|
@ -441,6 +582,6 @@ class TestInterimAssistantMessageConfig:
|
|||
migrate_config(interactive=False, quiet=True)
|
||||
raw = yaml.safe_load(config_path.read_text(encoding="utf-8"))
|
||||
|
||||
assert raw["_config_version"] == 16
|
||||
assert raw["_config_version"] == 17
|
||||
assert raw["display"]["tool_progress"] == "off"
|
||||
assert raw["display"]["interim_assistant_messages"] is True
|
||||
|
|
|
|||
|
|
@ -12,49 +12,10 @@ from unittest.mock import MagicMock, patch
|
|||
import pytest
|
||||
|
||||
from hermes_cli.config import (
|
||||
_is_inside_container,
|
||||
get_container_exec_info,
|
||||
)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# _is_inside_container
|
||||
# =============================================================================
|
||||
|
||||
|
||||
def test_is_inside_container_dockerenv():
|
||||
"""Detects /.dockerenv marker file."""
|
||||
with patch("os.path.exists") as mock_exists:
|
||||
mock_exists.side_effect = lambda p: p == "/.dockerenv"
|
||||
assert _is_inside_container() is True
|
||||
|
||||
|
||||
def test_is_inside_container_containerenv():
|
||||
"""Detects Podman's /run/.containerenv marker."""
|
||||
with patch("os.path.exists") as mock_exists:
|
||||
mock_exists.side_effect = lambda p: p == "/run/.containerenv"
|
||||
assert _is_inside_container() is True
|
||||
|
||||
|
||||
def test_is_inside_container_cgroup_docker():
|
||||
"""Detects 'docker' in /proc/1/cgroup."""
|
||||
with patch("os.path.exists", return_value=False), \
|
||||
patch("builtins.open", create=True) as mock_open:
|
||||
mock_open.return_value.__enter__ = lambda s: s
|
||||
mock_open.return_value.__exit__ = MagicMock(return_value=False)
|
||||
mock_open.return_value.read = MagicMock(
|
||||
return_value="12:memory:/docker/abc123\n"
|
||||
)
|
||||
assert _is_inside_container() is True
|
||||
|
||||
|
||||
def test_is_inside_container_false_on_host():
|
||||
"""Returns False when none of the container indicators are present."""
|
||||
with patch("os.path.exists", return_value=False), \
|
||||
patch("builtins.open", side_effect=OSError("no such file")):
|
||||
assert _is_inside_container() is False
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# get_container_exec_info
|
||||
# =============================================================================
|
||||
|
|
@ -81,7 +42,7 @@ def container_env(tmp_path, monkeypatch):
|
|||
|
||||
def test_get_container_exec_info_returns_metadata(container_env):
|
||||
"""Reads .container-mode and returns all fields including exec_user."""
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False):
|
||||
with patch("hermes_constants.is_container", return_value=False):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info is not None
|
||||
|
|
@ -93,7 +54,7 @@ def test_get_container_exec_info_returns_metadata(container_env):
|
|||
|
||||
def test_get_container_exec_info_none_inside_container(container_env):
|
||||
"""Returns None when we're already inside a container."""
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=True):
|
||||
with patch("hermes_constants.is_container", return_value=True):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info is None
|
||||
|
|
@ -106,7 +67,7 @@ def test_get_container_exec_info_none_without_file(tmp_path, monkeypatch):
|
|||
monkeypatch.setenv("HERMES_HOME", str(hermes_home))
|
||||
monkeypatch.delenv("HERMES_DEV", raising=False)
|
||||
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False):
|
||||
with patch("hermes_constants.is_container", return_value=False):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info is None
|
||||
|
|
@ -116,7 +77,7 @@ def test_get_container_exec_info_skipped_when_hermes_dev(container_env, monkeypa
|
|||
"""Returns None when HERMES_DEV=1 is set (dev mode bypass)."""
|
||||
monkeypatch.setenv("HERMES_DEV", "1")
|
||||
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False):
|
||||
with patch("hermes_constants.is_container", return_value=False):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info is None
|
||||
|
|
@ -126,7 +87,7 @@ def test_get_container_exec_info_not_skipped_when_hermes_dev_zero(container_env,
|
|||
"""HERMES_DEV=0 does NOT trigger bypass — only '1' does."""
|
||||
monkeypatch.setenv("HERMES_DEV", "0")
|
||||
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False):
|
||||
with patch("hermes_constants.is_container", return_value=False):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info is not None
|
||||
|
|
@ -143,7 +104,7 @@ def test_get_container_exec_info_defaults():
|
|||
"# minimal file with no keys\n"
|
||||
)
|
||||
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False), \
|
||||
with patch("hermes_constants.is_container", return_value=False), \
|
||||
patch("hermes_cli.config.get_hermes_home", return_value=hermes_home), \
|
||||
patch.dict(os.environ, {}, clear=False):
|
||||
os.environ.pop("HERMES_DEV", None)
|
||||
|
|
@ -165,7 +126,7 @@ def test_get_container_exec_info_docker_backend(container_env):
|
|||
"hermes_bin=/opt/hermes/bin/hermes\n"
|
||||
)
|
||||
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False):
|
||||
with patch("hermes_constants.is_container", return_value=False):
|
||||
info = get_container_exec_info()
|
||||
|
||||
assert info["backend"] == "docker"
|
||||
|
|
@ -176,7 +137,7 @@ def test_get_container_exec_info_docker_backend(container_env):
|
|||
|
||||
def test_get_container_exec_info_crashes_on_permission_error(container_env):
|
||||
"""PermissionError propagates instead of being silently swallowed."""
|
||||
with patch("hermes_cli.config._is_inside_container", return_value=False), \
|
||||
with patch("hermes_constants.is_container", return_value=False), \
|
||||
patch("builtins.open", side_effect=PermissionError("permission denied")):
|
||||
with pytest.raises(PermissionError):
|
||||
get_container_exec_info()
|
||||
|
|
|
|||
461
tests/hermes_cli/test_debug.py
Normal file
461
tests/hermes_cli/test_debug.py
Normal file
|
|
@ -0,0 +1,461 @@
|
|||
"""Tests for ``hermes debug`` CLI command and debug utilities."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import urllib.error
|
||||
from pathlib import Path
|
||||
from unittest.mock import MagicMock, patch, call
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Fixtures
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@pytest.fixture
|
||||
def hermes_home(tmp_path, monkeypatch):
|
||||
"""Set up an isolated HERMES_HOME with minimal logs."""
|
||||
home = tmp_path / ".hermes"
|
||||
home.mkdir()
|
||||
monkeypatch.setenv("HERMES_HOME", str(home))
|
||||
|
||||
# Create log files
|
||||
logs_dir = home / "logs"
|
||||
logs_dir.mkdir()
|
||||
(logs_dir / "agent.log").write_text(
|
||||
"2026-04-12 17:00:00 INFO agent: session started\n"
|
||||
"2026-04-12 17:00:01 INFO tools.terminal: running ls\n"
|
||||
"2026-04-12 17:00:02 WARNING agent: high token usage\n"
|
||||
)
|
||||
(logs_dir / "errors.log").write_text(
|
||||
"2026-04-12 17:00:05 ERROR gateway.run: connection lost\n"
|
||||
)
|
||||
(logs_dir / "gateway.log").write_text(
|
||||
"2026-04-12 17:00:10 INFO gateway.run: started\n"
|
||||
)
|
||||
|
||||
return home
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Unit tests for upload helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestUploadPasteRs:
    """Exercise the primary paste.rs upload helper."""

    @staticmethod
    def _fake_response(payload):
        # Build a context-manager-compatible mock HTTP response object.
        resp = MagicMock()
        resp.read.return_value = payload
        resp.__enter__ = lambda s: s
        resp.__exit__ = MagicMock(return_value=False)
        return resp

    def test_upload_paste_rs_success(self):
        from hermes_cli.debug import _upload_paste_rs

        fake = self._fake_response(b"https://paste.rs/abc123\n")
        with patch("hermes_cli.debug.urllib.request.urlopen", return_value=fake):
            assert _upload_paste_rs("hello world") == "https://paste.rs/abc123"

    def test_upload_paste_rs_bad_response(self):
        from hermes_cli.debug import _upload_paste_rs

        fake = self._fake_response(b"<html>error</html>")
        with patch("hermes_cli.debug.urllib.request.urlopen", return_value=fake):
            with pytest.raises(ValueError, match="Unexpected response"):
                _upload_paste_rs("test")

    def test_upload_paste_rs_network_error(self):
        from hermes_cli.debug import _upload_paste_rs

        with patch(
            "hermes_cli.debug.urllib.request.urlopen",
            side_effect=urllib.error.URLError("connection refused"),
        ):
            with pytest.raises(urllib.error.URLError):
                _upload_paste_rs("test")
|
||||
|
||||
|
||||
class TestUploadDpasteCom:
    """Exercise the dpaste.com fallback upload helper."""

    def test_upload_dpaste_com_success(self):
        from hermes_cli.debug import _upload_dpaste_com

        # Mock response that also works as a context manager.
        resp = MagicMock()
        resp.read.return_value = b"https://dpaste.com/ABCDEFG\n"
        resp.__enter__ = lambda s: s
        resp.__exit__ = MagicMock(return_value=False)

        with patch("hermes_cli.debug.urllib.request.urlopen", return_value=resp):
            result = _upload_dpaste_com("hello world", expiry_days=7)

        assert result == "https://dpaste.com/ABCDEFG"
|
||||
|
||||
|
||||
class TestUploadToPastebin:
    """Verify upload_to_pastebin's primary/fallback ordering."""

    def test_tries_paste_rs_first(self):
        from hermes_cli.debug import upload_to_pastebin

        with patch(
            "hermes_cli.debug._upload_paste_rs", return_value="https://paste.rs/test"
        ) as primary:
            assert upload_to_pastebin("content") == "https://paste.rs/test"
        primary.assert_called_once()

    def test_falls_back_to_dpaste_com(self):
        from hermes_cli.debug import upload_to_pastebin

        with patch("hermes_cli.debug._upload_paste_rs", side_effect=Exception("down")), \
             patch(
                 "hermes_cli.debug._upload_dpaste_com",
                 return_value="https://dpaste.com/TEST",
             ) as fallback:
            assert upload_to_pastebin("content") == "https://dpaste.com/TEST"
        fallback.assert_called_once()

    def test_raises_when_both_fail(self):
        from hermes_cli.debug import upload_to_pastebin

        with patch("hermes_cli.debug._upload_paste_rs", side_effect=Exception("err1")), \
             patch("hermes_cli.debug._upload_dpaste_com", side_effect=Exception("err2")):
            with pytest.raises(RuntimeError, match="Failed to upload"):
                upload_to_pastebin("content")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Log reading
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestReadFullLog:
    """Behavior of _read_full_log, used for standalone log uploads."""

    def test_reads_small_file(self, hermes_home):
        from hermes_cli.debug import _read_full_log

        text = _read_full_log("agent")
        assert text is not None
        assert "session started" in text

    def test_returns_none_for_missing(self, tmp_path, monkeypatch):
        # Fresh HERMES_HOME with no logs directory at all.
        empty_home = tmp_path / ".hermes"
        empty_home.mkdir()
        monkeypatch.setenv("HERMES_HOME", str(empty_home))

        from hermes_cli.debug import _read_full_log
        assert _read_full_log("agent") is None

    def test_returns_none_for_empty(self, hermes_home):
        from hermes_cli.debug import _read_full_log

        # An empty primary log (with no rotation present) yields None.
        (hermes_home / "logs" / "agent.log").write_text("")
        assert _read_full_log("agent") is None

    def test_truncates_large_file(self, hermes_home):
        """Files larger than max_bytes get tail-truncated."""
        from hermes_cli.debug import _read_full_log

        # Write well past the 1 KiB cap used below.
        line = "x" * 100 + "\n"
        (hermes_home / "logs" / "agent.log").write_text(line * 200)

        text = _read_full_log("agent", max_bytes=1024)
        assert text is not None
        assert "truncated" in text

    def test_unknown_log_returns_none(self, hermes_home):
        from hermes_cli.debug import _read_full_log
        assert _read_full_log("nonexistent") is None

    def test_falls_back_to_rotated_file(self, hermes_home):
        """When gateway.log doesn't exist, falls back to gateway.log.1."""
        from hermes_cli.debug import _read_full_log

        log_dir = hermes_home / "logs"
        (log_dir / "gateway.log").unlink(missing_ok=True)
        (log_dir / "gateway.log.1").write_text(
            "2026-04-12 10:00:00 INFO gateway.run: rotated content\n"
        )

        text = _read_full_log("gateway")
        assert text is not None
        assert "rotated content" in text

    def test_prefers_primary_over_rotated(self, hermes_home):
        """Primary log is used when it exists, even if .1 also exists."""
        from hermes_cli.debug import _read_full_log

        log_dir = hermes_home / "logs"
        (log_dir / "gateway.log").write_text("primary content\n")
        (log_dir / "gateway.log.1").write_text("rotated content\n")

        text = _read_full_log("gateway")
        assert "primary content" in text
        assert "rotated" not in text

    def test_falls_back_when_primary_empty(self, hermes_home):
        """Empty primary log falls back to .1 rotation."""
        from hermes_cli.debug import _read_full_log

        log_dir = hermes_home / "logs"
        (log_dir / "agent.log").write_text("")
        (log_dir / "agent.log.1").write_text("rotated agent data\n")

        text = _read_full_log("agent")
        assert text is not None
        assert "rotated agent data" in text
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Debug report collection
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestCollectDebugReport:
    """Assemble-and-inspect tests for collect_debug_report."""

    def test_report_includes_dump_output(self, hermes_home):
        from hermes_cli.debug import collect_debug_report

        with patch("hermes_cli.dump.run_dump") as fake_dump:
            # run_dump prints to stdout; emulate that so capture works.
            fake_dump.side_effect = lambda args: print(
                "--- hermes dump ---\nversion: 0.8.0\n--- end dump ---"
            )
            report = collect_debug_report(log_lines=50)

        assert "--- hermes dump ---" in report
        assert "version: 0.8.0" in report

    def test_report_includes_agent_log(self, hermes_home):
        from hermes_cli.debug import collect_debug_report

        with patch("hermes_cli.dump.run_dump"):
            report = collect_debug_report(log_lines=50)

        assert "--- agent.log" in report
        assert "session started" in report

    def test_report_includes_errors_log(self, hermes_home):
        from hermes_cli.debug import collect_debug_report

        with patch("hermes_cli.dump.run_dump"):
            report = collect_debug_report(log_lines=50)

        assert "--- errors.log" in report
        assert "connection lost" in report

    def test_report_includes_gateway_log(self, hermes_home):
        from hermes_cli.debug import collect_debug_report

        with patch("hermes_cli.dump.run_dump"):
            report = collect_debug_report(log_lines=50)

        assert "--- gateway.log" in report

    def test_missing_logs_handled(self, tmp_path, monkeypatch):
        # A bare HERMES_HOME: every log section should note the gap.
        empty_home = tmp_path / ".hermes"
        empty_home.mkdir()
        monkeypatch.setenv("HERMES_HOME", str(empty_home))

        from hermes_cli.debug import collect_debug_report

        with patch("hermes_cli.dump.run_dump"):
            report = collect_debug_report(log_lines=50)

        assert "(file not found)" in report
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# CLI entry point — run_debug_share
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestRunDebugShare:
    """End-to-end tests for the run_debug_share CLI handler."""

    @staticmethod
    def _make_args(local):
        # Minimal argparse-namespace stand-in accepted by run_debug_share.
        args = MagicMock()
        args.lines = 50
        args.expire = 7
        args.local = local
        return args

    def test_local_flag_prints_full_logs(self, hermes_home, capsys):
        """--local prints the report plus full log contents."""
        from hermes_cli.debug import run_debug_share

        with patch("hermes_cli.dump.run_dump"):
            run_debug_share(self._make_args(local=True))

        out = capsys.readouterr().out
        assert "--- agent.log" in out
        assert "FULL agent.log" in out
        assert "FULL gateway.log" in out

    def test_share_uploads_three_pastes(self, hermes_home, capsys):
        """Successful share uploads report + agent.log + gateway.log."""
        from hermes_cli.debug import run_debug_share

        uploads = []

        def _record_upload(content, expiry_days=7):
            uploads.append(content)
            return f"https://paste.rs/paste{len(uploads)}"

        with patch("hermes_cli.dump.run_dump") as fake_dump, \
             patch("hermes_cli.debug.upload_to_pastebin", side_effect=_record_upload):
            fake_dump.side_effect = lambda a: print(
                "--- hermes dump ---\nversion: test\n--- end dump ---"
            )
            run_debug_share(self._make_args(local=False))

        out = capsys.readouterr().out
        # Three uploads: the report, then agent.log, then gateway.log.
        assert len(uploads) == 3
        assert "paste.rs/paste1" in out  # Report
        assert "paste.rs/paste2" in out  # agent.log
        assert "paste.rs/paste3" in out  # gateway.log
        assert "Report" in out
        assert "agent.log" in out
        assert "gateway.log" in out

        # Each per-log paste is prefixed with the dump header.
        assert "--- hermes dump ---" in uploads[1]
        assert "--- full agent.log ---" in uploads[1]
        assert "--- hermes dump ---" in uploads[2]
        assert "--- full gateway.log ---" in uploads[2]

    def test_share_skips_missing_logs(self, tmp_path, monkeypatch, capsys):
        """Only uploads logs that exist."""
        empty_home = tmp_path / ".hermes"
        empty_home.mkdir()
        monkeypatch.setenv("HERMES_HOME", str(empty_home))

        from hermes_cli.debug import run_debug_share

        upload_count = [0]

        def _count_upload(content, expiry_days=7):
            upload_count[0] += 1
            return f"https://paste.rs/paste{upload_count[0]}"

        with patch("hermes_cli.dump.run_dump"), \
             patch("hermes_cli.debug.upload_to_pastebin", side_effect=_count_upload):
            run_debug_share(self._make_args(local=False))

        out = capsys.readouterr().out
        # No log files exist, so only the report itself is uploaded.
        assert upload_count[0] == 1
        assert "Report" in out

    def test_share_continues_on_log_upload_failure(self, hermes_home, capsys):
        """Log upload failure doesn't stop the report from being shared."""
        from hermes_cli.debug import run_debug_share

        upload_count = [0]

        def _flaky_upload(content, expiry_days=7):
            # First call (the report) succeeds; every log upload fails.
            upload_count[0] += 1
            if upload_count[0] > 1:
                raise RuntimeError("upload failed")
            return "https://paste.rs/report"

        with patch("hermes_cli.dump.run_dump"), \
             patch("hermes_cli.debug.upload_to_pastebin", side_effect=_flaky_upload):
            run_debug_share(self._make_args(local=False))

        out = capsys.readouterr().out
        assert "Report" in out
        assert "paste.rs/report" in out
        assert "failed to upload" in out

    def test_share_exits_on_report_upload_failure(self, hermes_home, capsys):
        """If the main report fails to upload, exit with code 1."""
        from hermes_cli.debug import run_debug_share

        with patch("hermes_cli.dump.run_dump"), \
             patch(
                 "hermes_cli.debug.upload_to_pastebin",
                 side_effect=RuntimeError("all failed"),
             ):
            with pytest.raises(SystemExit) as exc_info:
                run_debug_share(self._make_args(local=False))

        assert exc_info.value.code == 1
        assert "all failed" in capsys.readouterr().err
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# run_debug router
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestRunDebug:
    """Routing behavior of the top-level run_debug dispatcher."""

    def test_no_subcommand_shows_usage(self, capsys):
        from hermes_cli.debug import run_debug

        args = MagicMock()
        args.debug_command = None
        run_debug(args)

        assert "hermes debug share" in capsys.readouterr().out

    def test_share_subcommand_routes(self, hermes_home):
        from hermes_cli.debug import run_debug

        # 'share' with --local exercises the full path without network.
        args = MagicMock()
        args.debug_command = "share"
        args.lines = 200
        args.expire = 7
        args.local = True

        with patch("hermes_cli.dump.run_dump"):
            run_debug(args)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Argparse integration
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TestArgparseIntegration:
    """Smoke tests tying the debug module into the argparse wiring."""

    def test_module_imports_clean(self):
        from hermes_cli.debug import run_debug, run_debug_share

        assert callable(run_debug)
        assert callable(run_debug_share)

    def test_cmd_debug_dispatches(self):
        from hermes_cli.main import cmd_debug

        # With no subcommand the dispatcher should run without raising.
        args = MagicMock()
        args.debug_command = None
        cmd_debug(args)
|
||||
91
tests/hermes_cli/test_env_sanitize_on_load.py
Normal file
91
tests/hermes_cli/test_env_sanitize_on_load.py
Normal file
|
|
@ -0,0 +1,91 @@
|
|||
"""Tests for .env sanitization during load to prevent token duplication (#8908)."""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
|
||||
def test_load_env_sanitizes_concatenated_lines():
    """Verify load_env() splits concatenated KEY=VALUE pairs.

    Reproduces the scenario from #8908 where a corrupted .env file
    contained multiple tokens on a single line, causing the bot token
    to be duplicated 8 times.
    """
    from hermes_cli.config import load_env

    token = "8356550917:AAGGEkzg06Hrc3Hjb3Sa1jkGVDOdU_lYy2Q"
    # Two KEY=VALUE pairs fused onto a single line, as seen in the wild.
    corrupted = f"TELEGRAM_BOT_TOKEN={token}ANTHROPIC_API_KEY=sk-ant-test123\n"

    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".env", delete=False, encoding="utf-8"
    ) as tmp:
        tmp.write(corrupted)
        env_path = Path(tmp.name)

    try:
        with patch("hermes_cli.config.get_env_path", return_value=env_path):
            parsed = load_env()
            assert parsed.get("TELEGRAM_BOT_TOKEN") == token, (
                f"Token should be exactly '{token}', got '{parsed.get('TELEGRAM_BOT_TOKEN')}'"
            )
            assert parsed.get("ANTHROPIC_API_KEY") == "sk-ant-test123"
    finally:
        env_path.unlink(missing_ok=True)
|
||||
|
||||
|
||||
def test_load_env_normal_file_unchanged():
    """A well-formed .env file should be parsed identically."""
    from hermes_cli.config import load_env

    content = (
        "TELEGRAM_BOT_TOKEN=mytoken123\n"
        "ANTHROPIC_API_KEY=sk-ant-key\n"
        "# comment\n"
        "\n"
        "OPENAI_API_KEY=sk-openai\n"
    )

    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".env", delete=False, encoding="utf-8"
    ) as tmp:
        tmp.write(content)
        env_path = Path(tmp.name)

    try:
        with patch("hermes_cli.config.get_env_path", return_value=env_path):
            parsed = load_env()
        assert parsed["TELEGRAM_BOT_TOKEN"] == "mytoken123"
        assert parsed["ANTHROPIC_API_KEY"] == "sk-ant-key"
        assert parsed["OPENAI_API_KEY"] == "sk-openai"
    finally:
        env_path.unlink(missing_ok=True)
|
||||
|
||||
|
||||
def test_env_loader_sanitizes_before_dotenv():
    """Verify env_loader._sanitize_env_file_if_needed fixes corrupted files."""
    from hermes_cli.env_loader import _sanitize_env_file_if_needed

    token = "8356550917:AAGGEkzg06Hrc3Hjb3Sa1jkGVDOdU_lYy2Q"
    corrupted = f"TELEGRAM_BOT_TOKEN={token}ANTHROPIC_API_KEY=sk-ant-test\n"

    with tempfile.NamedTemporaryFile(
        mode="w", suffix=".env", delete=False, encoding="utf-8"
    ) as tmp:
        tmp.write(corrupted)
        env_path = Path(tmp.name)

    try:
        _sanitize_env_file_if_needed(env_path)
        lines = env_path.read_text(encoding="utf-8").splitlines(keepends=True)
        # The fused line must be split into two separate assignments.
        assert len(lines) == 2, f"Expected 2 lines, got {len(lines)}: {lines}"
        assert lines[0].startswith("TELEGRAM_BOT_TOKEN=")
        assert lines[1].startswith("ANTHROPIC_API_KEY=")
        # The token value must not swallow the second key.
        assert lines[0].strip().split("=", 1)[1] == token
    finally:
        env_path.unlink(missing_ok=True)
|
||||
|
|
@ -394,6 +394,21 @@ class TestLaunchdServiceRecovery:
|
|||
|
||||
|
||||
class TestGatewayServiceDetection:
|
||||
def test_supports_systemd_services_requires_systemctl_binary(self, monkeypatch):
|
||||
monkeypatch.setattr(gateway_cli, "is_linux", lambda: True)
|
||||
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli.shutil, "which", lambda name: None)
|
||||
|
||||
assert gateway_cli.supports_systemd_services() is False
|
||||
|
||||
def test_supports_systemd_services_returns_true_when_systemctl_present(self, monkeypatch):
|
||||
monkeypatch.setattr(gateway_cli, "is_linux", lambda: True)
|
||||
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli.shutil, "which", lambda name: "/usr/bin/systemctl")
|
||||
|
||||
assert gateway_cli.supports_systemd_services() is True
|
||||
|
||||
def test_is_service_running_checks_system_scope_when_user_scope_is_inactive(self, monkeypatch):
|
||||
user_unit = SimpleNamespace(exists=lambda: True)
|
||||
system_unit = SimpleNamespace(exists=lambda: True)
|
||||
|
|
@ -418,6 +433,23 @@ class TestGatewayServiceDetection:
|
|||
|
||||
assert gateway_cli._is_service_running() is True
|
||||
|
||||
def test_is_service_running_returns_false_when_systemctl_missing(self, monkeypatch):
|
||||
unit = SimpleNamespace(exists=lambda: True)
|
||||
|
||||
monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: True)
|
||||
monkeypatch.setattr(
|
||||
gateway_cli,
|
||||
"get_systemd_unit_path",
|
||||
lambda system=False: unit,
|
||||
)
|
||||
|
||||
def fake_run(*args, **kwargs):
|
||||
raise FileNotFoundError("systemctl")
|
||||
|
||||
monkeypatch.setattr(gateway_cli.subprocess, "run", fake_run)
|
||||
|
||||
assert gateway_cli._is_service_running() is False
|
||||
|
||||
|
||||
class TestGatewaySystemServiceRouting:
|
||||
def test_systemd_restart_self_requests_graceful_restart_without_reload_or_restart(self, monkeypatch, capsys):
|
||||
|
|
@ -1001,3 +1033,91 @@ class TestSystemUnitPathRemapping:
|
|||
# Target user paths should be present
|
||||
assert "/home/alice" in unit
|
||||
assert "WorkingDirectory=/home/alice/.hermes/hermes-agent" in unit
|
||||
|
||||
|
||||
class TestDockerAwareGateway:
|
||||
"""Tests for Docker container awareness in gateway commands."""
|
||||
|
||||
def test_run_systemctl_raises_runtimeerror_when_missing(self, monkeypatch):
|
||||
"""_run_systemctl raises RuntimeError with container guidance when systemctl is absent."""
|
||||
import pytest
|
||||
|
||||
def fake_run(cmd, **kwargs):
|
||||
raise FileNotFoundError("systemctl")
|
||||
|
||||
monkeypatch.setattr(gateway_cli.subprocess, "run", fake_run)
|
||||
|
||||
with pytest.raises(RuntimeError, match="systemctl is not available"):
|
||||
gateway_cli._run_systemctl(["start", "hermes-gateway"])
|
||||
|
||||
def test_run_systemctl_passes_through_on_success(self, monkeypatch):
|
||||
"""_run_systemctl delegates to subprocess.run when systemctl exists."""
|
||||
calls = []
|
||||
|
||||
def fake_run(cmd, **kwargs):
|
||||
calls.append(cmd)
|
||||
return SimpleNamespace(returncode=0, stdout="", stderr="")
|
||||
|
||||
monkeypatch.setattr(gateway_cli.subprocess, "run", fake_run)
|
||||
|
||||
result = gateway_cli._run_systemctl(["status", "hermes-gateway"])
|
||||
assert result.returncode == 0
|
||||
assert len(calls) == 1
|
||||
assert "status" in calls[0]
|
||||
|
||||
def test_install_in_container_prints_docker_guidance(self, monkeypatch, capsys):
|
||||
"""'hermes gateway install' inside Docker exits 0 with container guidance."""
|
||||
import pytest
|
||||
|
||||
monkeypatch.setattr(gateway_cli, "is_managed", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
|
||||
|
||||
args = SimpleNamespace(gateway_command="install", force=False, system=False, run_as_user=None)
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
gateway_cli.gateway_command(args)
|
||||
|
||||
assert exc_info.value.code == 0
|
||||
out = capsys.readouterr().out
|
||||
assert "Docker" in out or "docker" in out
|
||||
assert "restart" in out.lower()
|
||||
|
||||
def test_uninstall_in_container_prints_docker_guidance(self, monkeypatch, capsys):
|
||||
"""'hermes gateway uninstall' inside Docker exits 0 with container guidance."""
|
||||
import pytest
|
||||
|
||||
monkeypatch.setattr(gateway_cli, "is_managed", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
|
||||
|
||||
args = SimpleNamespace(gateway_command="uninstall", system=False)
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
gateway_cli.gateway_command(args)
|
||||
|
||||
assert exc_info.value.code == 0
|
||||
out = capsys.readouterr().out
|
||||
assert "docker" in out.lower()
|
||||
|
||||
def test_start_in_container_prints_docker_guidance(self, monkeypatch, capsys):
|
||||
"""'hermes gateway start' inside Docker exits 0 with container guidance."""
|
||||
import pytest
|
||||
|
||||
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
|
||||
monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
|
||||
|
||||
args = SimpleNamespace(gateway_command="start", system=False)
|
||||
with pytest.raises(SystemExit) as exc_info:
|
||||
gateway_cli.gateway_command(args)
|
||||
|
||||
assert exc_info.value.code == 0
|
||||
out = capsys.readouterr().out
|
||||
assert "docker" in out.lower()
|
||||
assert "hermes gateway run" in out
|
||||
|
|
|
|||
|
|
@ -54,14 +54,19 @@ class TestAnthropicDotToHyphen:
|
|||
|
||||
# ── OpenCode Zen regression ────────────────────────────────────────────
|
||||
|
||||
class TestOpenCodeZenDotToHyphen:
|
||||
"""OpenCode Zen follows Anthropic convention (dots→hyphens)."""
|
||||
class TestOpenCodeZenModelNormalization:
|
||||
"""OpenCode Zen preserves dots for most models, but Claude stays hyphenated."""
|
||||
|
||||
@pytest.mark.parametrize("model,expected", [
|
||||
("claude-sonnet-4.6", "claude-sonnet-4-6"),
|
||||
("glm-4.5", "glm-4-5"),
|
||||
("opencode-zen/claude-opus-4.5", "claude-opus-4-5"),
|
||||
("glm-4.5", "glm-4.5"),
|
||||
("glm-5.1", "glm-5.1"),
|
||||
("gpt-5.4", "gpt-5.4"),
|
||||
("minimax-m2.5-free", "minimax-m2.5-free"),
|
||||
("kimi-k2.5", "kimi-k2.5"),
|
||||
])
|
||||
def test_zen_converts_dots(self, model, expected):
|
||||
def test_zen_normalizes_models(self, model, expected):
|
||||
result = normalize_model_for_provider(model, "opencode-zen")
|
||||
assert result == expected
|
||||
|
||||
|
|
@ -69,6 +74,10 @@ class TestOpenCodeZenDotToHyphen:
|
|||
result = normalize_model_for_provider("opencode-zen/claude-sonnet-4.6", "opencode-zen")
|
||||
assert result == "claude-sonnet-4-6"
|
||||
|
||||
def test_zen_strips_vendor_prefix_for_non_claude(self):
|
||||
result = normalize_model_for_provider("opencode-zen/glm-5.1", "opencode-zen")
|
||||
assert result == "glm-5.1"
|
||||
|
||||
|
||||
# ── Copilot dot preservation (regression) ──────────────────────────────
|
||||
|
||||
|
|
|
|||
84
tests/hermes_cli/test_nous_hermes_non_agentic.py
Normal file
84
tests/hermes_cli/test_nous_hermes_non_agentic.py
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
"""Tests for the Nous-Hermes-3/4 non-agentic warning detector.
|
||||
|
||||
Prior to this check, the warning fired on any model whose name contained
|
||||
``"hermes"`` anywhere (case-insensitive). That false-positived on unrelated
|
||||
local Modelfiles such as ``hermes-brain:qwen3-14b-ctx16k`` — a tool-capable
|
||||
Qwen3 wrapper that happens to live under the "hermes" tag namespace.
|
||||
|
||||
``is_nous_hermes_non_agentic`` should only match the actual Nous Research
|
||||
Hermes-3 / Hermes-4 chat family.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import pytest
|
||||
|
||||
from hermes_cli.model_switch import (
|
||||
_HERMES_MODEL_WARNING,
|
||||
_check_hermes_model_warning,
|
||||
is_nous_hermes_non_agentic,
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    "model_name",
    [
        "NousResearch/Hermes-3-Llama-3.1-70B",
        "NousResearch/Hermes-3-Llama-3.1-405B",
        "hermes-3",
        "Hermes-3",
        "hermes-4",
        "hermes-4-405b",
        "hermes_4_70b",
        "openrouter/hermes3:70b",
        "openrouter/nousresearch/hermes-4-405b",
        "NousResearch/Hermes3",
        "hermes-3.1",
    ],
)
def test_matches_real_nous_hermes_chat_models(model_name: str) -> None:
    """Every genuine Nous Hermes 3/4 spelling must trigger the warning."""
    flagged = is_nous_hermes_non_agentic(model_name)
    assert flagged, f"expected {model_name!r} to be flagged as Nous Hermes 3/4"
    assert _check_hermes_model_warning(model_name) == _HERMES_MODEL_WARNING
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
    "model_name",
    [
        # Kyle's local Modelfile — qwen3:14b under a custom tag
        "hermes-brain:qwen3-14b-ctx16k",
        "hermes-brain:qwen3-14b-ctx32k",
        "hermes-honcho:qwen3-8b-ctx8k",
        # Plain unrelated models
        "qwen3:14b",
        "qwen3-coder:30b",
        "qwen2.5:14b",
        "claude-opus-4-6",
        "anthropic/claude-sonnet-4.5",
        "gpt-5",
        "openai/gpt-4o",
        "google/gemini-2.5-flash",
        "deepseek-chat",
        # Non-chat Hermes models we don't warn about
        "hermes-llm-2",
        "hermes2-pro",
        "nous-hermes-2-mistral",
        # Edge cases
        "",
        "hermes",  # bare "hermes" isn't the 3/4 family
        "hermes-brain",
        "brain-hermes-3-impostor",  # "3" not preceded by /: boundary
    ],
)
def test_does_not_match_unrelated_models(model_name: str) -> None:
    """Models merely containing 'hermes' must not be flagged."""
    flagged = is_nous_hermes_non_agentic(model_name)
    assert not flagged, f"expected {model_name!r} NOT to be flagged as Nous Hermes 3/4"
    assert _check_hermes_model_warning(model_name) == ""
|
||||
|
||||
|
||||
def test_none_like_inputs_are_safe() -> None:
    """Falsy inputs must neither match nor crash the helpers."""
    assert is_nous_hermes_non_agentic("") is False
    # Defensive: the warning helper tolerates empty/falsy input as well.
    assert _check_hermes_model_warning("") == ""
|
||||
|
|
@ -177,7 +177,8 @@ class TestCreateProfile:
|
|||
# No error; optional files just not copied
|
||||
assert not (profile_dir / "config.yaml").exists()
|
||||
assert not (profile_dir / ".env").exists()
|
||||
assert not (profile_dir / "SOUL.md").exists()
|
||||
# SOUL.md is always seeded with the default even when clone source lacks it
|
||||
assert (profile_dir / "SOUL.md").exists()
|
||||
|
||||
|
||||
# ===================================================================
|
||||
|
|
|
|||
|
|
@ -119,6 +119,11 @@ def test_resolve_runtime_provider_falls_back_when_pool_empty(monkeypatch):
|
|||
|
||||
|
||||
def test_resolve_runtime_provider_codex(monkeypatch):
|
||||
monkeypatch.setattr(
|
||||
rp,
|
||||
"load_pool",
|
||||
lambda provider: type("P", (), {"has_credentials": lambda self: False})(),
|
||||
)
|
||||
monkeypatch.setattr(rp, "resolve_provider", lambda *a, **k: "openai-codex")
|
||||
monkeypatch.setattr(
|
||||
rp,
|
||||
|
|
@ -567,6 +572,87 @@ def test_named_custom_provider_uses_saved_credentials(monkeypatch):
|
|||
assert resolved["source"] == "custom_provider:Local"
|
||||
|
||||
|
||||
def test_named_custom_provider_uses_providers_dict_when_list_missing(monkeypatch):
    """After v11→v12 migration deletes custom_providers, resolution should
    still find entries in the providers dict via get_compatible_custom_providers."""
    for var in ("OPENAI_API_KEY", "OPENROUTER_API_KEY"):
        monkeypatch.delenv(var, raising=False)

    fake_config = {
        "providers": {
            "openai-direct-primary": {
                "api": "https://api.openai.com/v1",
                "api_key": "dir-key",
                "default_model": "gpt-5-mini",
                "name": "OpenAI Direct (Primary)",
                "transport": "codex_responses",
            }
        }
    }
    monkeypatch.setattr(rp, "load_config", lambda: fake_config)

    def _unexpected_resolve(*args, **kwargs):
        # Named custom providers must short-circuit provider resolution.
        raise AssertionError(
            "resolve_provider should not be called for named custom providers"
        )

    monkeypatch.setattr(rp, "resolve_provider", _unexpected_resolve)

    resolved = rp.resolve_runtime_provider(requested="openai-direct-primary")

    assert resolved["provider"] == "custom"
    assert resolved["api_mode"] == "codex_responses"
    assert resolved["base_url"] == "https://api.openai.com/v1"
    assert resolved["api_key"] == "dir-key"
    assert resolved["requested_provider"] == "openai-direct-primary"
    assert resolved["source"] == "custom_provider:OpenAI Direct (Primary)"
    assert resolved["model"] == "gpt-5-mini"
|
||||
|
||||
|
||||
def test_named_custom_provider_uses_key_env_from_providers_dict(monkeypatch):
    """providers dict entries with key_env should resolve API key from env var."""
    # Clear ambient keys, then provide the one the provider entry points at.
    for var in ("OPENAI_API_KEY", "OPENROUTER_API_KEY"):
        monkeypatch.delenv(var, raising=False)
    monkeypatch.setenv("MYCORP_API_KEY", "env-secret")

    fake_config = {
        "providers": {
            "mycorp-proxy": {
                "base_url": "https://proxy.example.com/v1",
                "default_model": "acme-large",
                "key_env": "MYCORP_API_KEY",
                "name": "MyCorp Proxy",
            }
        }
    }
    monkeypatch.setattr(rp, "load_config", lambda: fake_config)

    def _must_not_be_called(*args, **kwargs):
        raise AssertionError(
            "resolve_provider should not be called for named custom providers"
        )

    monkeypatch.setattr(rp, "resolve_provider", _must_not_be_called)

    resolved = rp.resolve_runtime_provider(requested="mycorp-proxy")

    assert resolved["provider"] == "custom"
    assert resolved["api_mode"] == "chat_completions"
    assert resolved["base_url"] == "https://proxy.example.com/v1"
    assert resolved["api_key"] == "env-secret"
    assert resolved["requested_provider"] == "mycorp-proxy"
    assert resolved["source"] == "custom_provider:MyCorp Proxy"
    assert resolved["model"] == "acme-large"
||||
def test_named_custom_provider_falls_back_to_openai_api_key(monkeypatch):
|
||||
monkeypatch.setenv("OPENAI_API_KEY", "env-openai-key")
|
||||
monkeypatch.delenv("OPENROUTER_API_KEY", raising=False)
|
||||
|
|
|
|||
|
|
@ -1,5 +1,4 @@
|
|||
"""Tests for setup_model_provider — verifies the delegation to
|
||||
select_provider_and_model() and config dict sync."""
|
||||
"""Tests for setup.py configuration flows."""
|
||||
import json
|
||||
import sys
|
||||
import types
|
||||
|
|
@ -8,6 +7,7 @@ import pytest
|
|||
|
||||
from hermes_cli.auth import get_active_provider
|
||||
from hermes_cli.config import load_config, save_config
|
||||
from hermes_cli import setup as setup_mod
|
||||
from hermes_cli.setup import setup_model_provider
|
||||
|
||||
|
||||
|
|
@ -144,6 +144,85 @@ def test_setup_custom_providers_synced(tmp_path, monkeypatch):
|
|||
assert reloaded.get("custom_providers") == [{"name": "Local", "base_url": "http://localhost:8080/v1"}]
|
||||
|
||||
|
||||
def test_setup_gateway_skips_service_install_when_systemctl_missing(monkeypatch, capsys):
    """setup_gateway() on Linux without systemd should skip service install
    and print manual-start guidance instead."""
    # Only Matrix is configured; every other platform is blank.
    env = dict.fromkeys(
        (
            "TELEGRAM_BOT_TOKEN",
            "TELEGRAM_HOME_CHANNEL",
            "DISCORD_BOT_TOKEN",
            "DISCORD_HOME_CHANNEL",
            "SLACK_BOT_TOKEN",
            "SLACK_HOME_CHANNEL",
            "MATRIX_PASSWORD",
            "BLUEBUBBLES_SERVER_URL",
            "BLUEBUBBLES_HOME_CHANNEL",
            "WHATSAPP_ENABLED",
            "WEBHOOK_ENABLED",
        ),
        "",
    )
    env.update(
        MATRIX_HOMESERVER="https://matrix.example.com",
        MATRIX_USER_ID="@alice:example.com",
        MATRIX_ACCESS_TOKEN="token",
    )

    monkeypatch.setattr(setup_mod, "get_env_value", lambda key: env.get(key, ""))
    monkeypatch.setattr(setup_mod, "prompt_yes_no", lambda *args, **kwargs: False)
    monkeypatch.setattr("platform.system", lambda: "Linux")

    import hermes_cli.gateway as gateway_mod

    # Simulate a Linux host with no systemd and no installed/running service.
    monkeypatch.setattr(gateway_mod, "supports_systemd_services", lambda: False)
    monkeypatch.setattr(gateway_mod, "is_macos", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_installed", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_running", lambda: False)

    setup_mod.setup_gateway({})

    out = capsys.readouterr().out
    assert "Messaging platforms configured!" in out
    assert "Start the gateway to bring your bots online:" in out
    assert "hermes gateway" in out
||||
def test_setup_gateway_in_container_shows_docker_guidance(monkeypatch, capsys):
    """setup_gateway() in a Docker container shows Docker-specific restart instructions."""
    env = dict.fromkeys(
        (
            "TELEGRAM_BOT_TOKEN",
            "TELEGRAM_HOME_CHANNEL",
            "DISCORD_BOT_TOKEN",
            "DISCORD_HOME_CHANNEL",
            "SLACK_BOT_TOKEN",
            "SLACK_HOME_CHANNEL",
            "MATRIX_PASSWORD",
            "BLUEBUBBLES_SERVER_URL",
            "BLUEBUBBLES_HOME_CHANNEL",
            "WHATSAPP_ENABLED",
            "WEBHOOK_ENABLED",
        ),
        "",
    )
    env.update(
        MATRIX_HOMESERVER="https://matrix.example.com",
        MATRIX_USER_ID="@alice:example.com",
        MATRIX_ACCESS_TOKEN="token",
    )

    monkeypatch.setattr(setup_mod, "get_env_value", lambda key: env.get(key, ""))
    monkeypatch.setattr(setup_mod, "prompt_yes_no", lambda *args, **kwargs: False)
    monkeypatch.setattr("platform.system", lambda: "Linux")

    import hermes_cli.gateway as gateway_mod

    monkeypatch.setattr(gateway_mod, "supports_systemd_services", lambda: False)
    monkeypatch.setattr(gateway_mod, "is_macos", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_installed", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_running", lambda: False)

    # Patch is_container at the import location in setup.py
    import hermes_constants

    monkeypatch.setattr(hermes_constants, "is_container", lambda: True)

    setup_mod.setup_gateway({})

    out = capsys.readouterr().out
    assert "Messaging platforms configured!" in out
    assert "docker" in out.lower() or "Docker" in out
    assert "restart" in out.lower()
||||
def test_setup_syncs_custom_provider_removal_from_disk(tmp_path, monkeypatch):
|
||||
"""Removing the last custom provider in model setup should persist."""
|
||||
monkeypatch.setenv("HERMES_HOME", str(tmp_path))
|
||||
|
|
|
|||
|
|
@ -119,8 +119,7 @@ def test_toolset_has_keys_for_vision_accepts_codex_auth(tmp_path, monkeypatch):
|
|||
monkeypatch.delenv("OPENROUTER_API_KEY", raising=False)
|
||||
monkeypatch.delenv("OPENAI_BASE_URL", raising=False)
|
||||
monkeypatch.delenv("OPENAI_API_KEY", raising=False)
|
||||
monkeypatch.delenv("AUXILIARY_VISION_PROVIDER", raising=False)
|
||||
monkeypatch.delenv("CONTEXT_VISION_PROVIDER", raising=False)
|
||||
|
||||
monkeypatch.setattr(
|
||||
"agent.auxiliary_client.resolve_vision_provider_client",
|
||||
lambda: ("openai-codex", object(), "gpt-4.1"),
|
||||
|
|
|
|||
280
tests/hermes_cli/test_user_providers_model_switch.py
Normal file
280
tests/hermes_cli/test_user_providers_model_switch.py
Normal file
|
|
@ -0,0 +1,280 @@
|
|||
"""Tests for user-defined providers (providers: dict) in /model.
|
||||
|
||||
These tests ensure that providers defined in the config.yaml ``providers:`` section
|
||||
are properly resolved for model switching and that their full ``models:`` lists
|
||||
are exposed in the model picker.
|
||||
"""
|
||||
|
||||
import pytest
|
||||
from hermes_cli.model_switch import list_authenticated_providers, switch_model
|
||||
from hermes_cli import runtime_provider as rp
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Tests for list_authenticated_providers including full models list
|
||||
# =============================================================================
|
||||
|
||||
def test_list_authenticated_providers_includes_full_models_list_from_user_providers(monkeypatch):
    """User-defined providers should expose both default_model and full models list.

    Regression test: previously only default_model was shown in /model picker.
    """
    monkeypatch.setattr("agent.models_dev.fetch_models_dev", lambda: {})
    monkeypatch.setattr("hermes_cli.providers.HERMES_OVERLAYS", {})

    expected_models = (
        "minimax-m2.7:cloud",
        "kimi-k2.5:cloud",
        "glm-5.1:cloud",
        "qwen3.5:cloud",
    )
    user_providers = {
        "local-ollama": {
            "name": "Local Ollama",
            "api": "http://localhost:11434/v1",
            "default_model": "minimax-m2.7:cloud",
            "models": list(expected_models),
        }
    }

    providers = list_authenticated_providers(
        current_provider="local-ollama",
        user_providers=user_providers,
        custom_providers=[],
        max_models=50,
    )

    # Locate the user-defined provider entry in the picker results.
    user_prov = next(
        (p for p in providers if p.get("is_user_defined") and p["slug"] == "local-ollama"),
        None,
    )

    assert user_prov is not None, "User provider 'local-ollama' should be in results"
    assert user_prov["total_models"] == 4, f"Expected 4 models, got {user_prov['total_models']}"
    for model in expected_models:
        assert model in user_prov["models"]
||||
def test_list_authenticated_providers_dedupes_models_when_default_in_list(monkeypatch):
    """When default_model is also in models list, don't duplicate."""
    monkeypatch.setattr("agent.models_dev.fetch_models_dev", lambda: {})
    monkeypatch.setattr("hermes_cli.providers.HERMES_OVERLAYS", {})

    user_providers = {
        "my-provider": {
            "api": "http://example.com/v1",
            # default_model deliberately overlaps the models list below.
            "default_model": "model-a",
            "models": ["model-a", "model-b", "model-c"],
        }
    }

    providers = list_authenticated_providers(
        current_provider="my-provider",
        user_providers=user_providers,
        custom_providers=[],
    )

    user_prov = next((p for p in providers if p.get("is_user_defined")), None)

    assert user_prov is not None
    assert user_prov["total_models"] == 3, "Should have 3 unique models, not 4"
    assert user_prov["models"].count("model-a") == 1, "model-a should not be duplicated"
||||
def test_list_authenticated_providers_fallback_to_default_only(monkeypatch):
    """When no models array is provided, should fall back to default_model."""
    monkeypatch.setattr("agent.models_dev.fetch_models_dev", lambda: {})
    monkeypatch.setattr("hermes_cli.providers.HERMES_OVERLAYS", {})

    # Provider entry intentionally lacks a 'models' key.
    user_providers = {
        "simple-provider": {
            "name": "Simple Provider",
            "api": "http://example.com/v1",
            "default_model": "single-model",
        }
    }

    providers = list_authenticated_providers(
        current_provider="",
        user_providers=user_providers,
        custom_providers=[],
    )

    user_prov = next((p for p in providers if p.get("is_user_defined")), None)

    assert user_prov is not None
    assert user_prov["total_models"] == 1
    assert user_prov["models"] == ["single-model"]
||||
|
||||
# =============================================================================
|
||||
# Tests for _get_named_custom_provider with providers: dict
|
||||
# =============================================================================
|
||||
|
||||
def test_get_named_custom_provider_finds_user_providers_by_key(monkeypatch, tmp_path):
    """Should resolve providers from providers: dict (new-style), not just custom_providers."""
    import yaml

    config = {
        "providers": {
            "local-localhost:11434": {
                "api": "http://localhost:11434/v1",
                "name": "Local (localhost:11434)",
                "default_model": "minimax-m2.7:cloud",
            }
        }
    }
    (tmp_path / "config.yaml").write_text(yaml.dump(config))
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    result = rp._get_named_custom_provider("local-localhost:11434")

    assert result is not None
    assert result["base_url"] == "http://localhost:11434/v1"
    assert result["name"] == "Local (localhost:11434)"
|
||||
def test_get_named_custom_provider_finds_by_display_name(monkeypatch, tmp_path):
    """Should match providers by their 'name' field as well as key."""
    import yaml

    config = {
        "providers": {
            "my-ollama-xyz": {
                "api": "http://ollama.example.com/v1",
                "name": "My Production Ollama",
                "default_model": "llama3",
            }
        }
    }
    (tmp_path / "config.yaml").write_text(yaml.dump(config))
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    # Lookup by the slugified display name, not the dict key.
    result = rp._get_named_custom_provider("my-production-ollama")

    assert result is not None
    assert result["base_url"] == "http://ollama.example.com/v1"
|
||||
def test_get_named_custom_provider_falls_back_to_legacy_format(monkeypatch, tmp_path):
    """Should still work with custom_providers: list format."""
    import yaml

    # New-style dict is empty; only the legacy list has an entry.
    config = {
        "providers": {},
        "custom_providers": [
            {
                "name": "Custom Endpoint",
                "base_url": "http://custom.example.com/v1",
            }
        ],
    }
    (tmp_path / "config.yaml").write_text(yaml.dump(config))
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    result = rp._get_named_custom_provider("custom-endpoint")

    assert result is not None
|
||||
def test_get_named_custom_provider_returns_none_for_unknown(monkeypatch, tmp_path):
    """Should return None for providers that don't exist.

    A name that shares no tokens with any configured provider must not
    resolve, even though the matcher is loose (substring-based).
    """
    import yaml

    config = {
        "providers": {
            "known-provider": {
                "api": "http://known.example.com/v1",
            }
        }
    }
    config_file = tmp_path / "config.yaml"
    config_file.write_text(yaml.dump(config))

    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    # Previously this test also called the resolver with "other-provider" and
    # silently discarded the result (dead code with a stale comment about
    # "unknown-provider"). Only the truly non-matching name is asserted here.
    result = rp._get_named_custom_provider("completely-different-name")
    assert result is None
||||
|
||||
def test_get_named_custom_provider_skips_empty_base_url(monkeypatch, tmp_path):
    """Should skip providers without a base_url."""
    import yaml

    # Entry has a name but no api/base_url field at all.
    config = {
        "providers": {
            "incomplete-provider": {
                "name": "Incomplete",
            }
        }
    }
    (tmp_path / "config.yaml").write_text(yaml.dump(config))
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    result = rp._get_named_custom_provider("incomplete-provider")

    assert result is None
||||
|
||||
# =============================================================================
|
||||
# Integration test for switch_model with user providers
|
||||
# =============================================================================
|
||||
|
||||
def test_switch_model_resolves_user_provider_credentials(monkeypatch, tmp_path):
    """/model switch should resolve credentials for providers: dict providers."""
    import yaml

    config = {
        "providers": {
            "local-ollama": {
                "api": "http://localhost:11434/v1",
                "name": "Local Ollama",
                "default_model": "minimax-m2.7:cloud",
            }
        }
    }
    (tmp_path / "config.yaml").write_text(yaml.dump(config))
    monkeypatch.setenv("HERMES_HOME", str(tmp_path))

    # Stub out model validation so the switch itself is what is under test.
    monkeypatch.setattr(
        "hermes_cli.models.validate_requested_model",
        lambda *a, **k: {"accepted": True, "persist": True, "recognized": True, "message": None},
    )

    result = switch_model(
        raw_input="kimi-k2.5:cloud",
        current_provider="local-ollama",
        current_model="minimax-m2.7:cloud",
        current_base_url="http://localhost:11434/v1",
        is_global=False,
        user_providers=config["providers"],
    )

    assert result.success is True
    assert result.error_message == ""
||||
675
tests/hermes_cli/test_web_server.py
Normal file
675
tests/hermes_cli/test_web_server.py
Normal file
|
|
@ -0,0 +1,675 @@
|
|||
"""Tests for hermes_cli.web_server and related config utilities."""
|
||||
|
||||
import os
|
||||
import json
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch, MagicMock
|
||||
|
||||
import pytest
|
||||
|
||||
from hermes_cli.config import (
|
||||
DEFAULT_CONFIG,
|
||||
reload_env,
|
||||
redact_key,
|
||||
_EXTRA_ENV_KEYS,
|
||||
OPTIONAL_ENV_VARS,
|
||||
)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# reload_env tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestReloadEnv:
    """Tests for reload_env() — re-reads .env into os.environ."""

    def test_adds_new_vars(self, tmp_path):
        """reload_env() adds vars from .env that are not in os.environ."""
        dotenv = tmp_path / ".env"
        dotenv.write_text("TEST_RELOAD_VAR=hello123\n")
        with patch("hermes_cli.config.get_env_path", return_value=dotenv):
            os.environ.pop("TEST_RELOAD_VAR", None)
            changed = reload_env()
            assert changed >= 1
            assert os.environ.get("TEST_RELOAD_VAR") == "hello123"
        os.environ.pop("TEST_RELOAD_VAR", None)

    def test_updates_changed_vars(self, tmp_path):
        """reload_env() updates vars whose value changed on disk."""
        dotenv = tmp_path / ".env"
        dotenv.write_text("TEST_RELOAD_VAR=old_value\n")
        with patch("hermes_cli.config.get_env_path", return_value=dotenv):
            os.environ["TEST_RELOAD_VAR"] = "old_value"
            # Rewrite the file so the on-disk value diverges from os.environ.
            dotenv.write_text("TEST_RELOAD_VAR=new_value\n")
            changed = reload_env()
            assert changed >= 1
            assert os.environ.get("TEST_RELOAD_VAR") == "new_value"
        os.environ.pop("TEST_RELOAD_VAR", None)

    def test_removes_deleted_known_vars(self, tmp_path):
        """reload_env() removes known Hermes vars not present in .env."""
        dotenv = tmp_path / ".env"
        dotenv.write_text("")  # empty .env
        # Any key the config layer recognizes will do.
        known_key = next(iter(OPTIONAL_ENV_VARS.keys()))
        with patch("hermes_cli.config.get_env_path", return_value=dotenv):
            os.environ[known_key] = "stale_value"
            changed = reload_env()
            assert known_key not in os.environ
            assert changed >= 1

    def test_does_not_remove_unknown_vars(self, tmp_path):
        """reload_env() preserves non-Hermes env vars even when absent from .env."""
        dotenv = tmp_path / ".env"
        dotenv.write_text("")
        with patch("hermes_cli.config.get_env_path", return_value=dotenv):
            os.environ["MY_CUSTOM_UNRELATED_VAR"] = "keep_me"
            reload_env()
            assert os.environ.get("MY_CUSTOM_UNRELATED_VAR") == "keep_me"
        os.environ.pop("MY_CUSTOM_UNRELATED_VAR", None)
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# redact_key tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestRedactKey:
    """Tests for redact_key() — masks secrets for display."""

    def test_long_key_shows_prefix_suffix(self):
        redacted = redact_key("sk-1234567890abcdef")
        assert redacted.startswith("sk-1")
        assert redacted.endswith("cdef")
        assert "..." in redacted

    def test_short_key_fully_masked(self):
        # Too short to safely reveal any characters.
        assert redact_key("short") == "***"

    def test_empty_key(self):
        redacted = redact_key("")
        # Implementations may return a styled "not set" string or a mask.
        assert "not set" in redacted.lower() or redacted == "***" or "\x1b" in redacted
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# web_server tests (FastAPI endpoints)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestWebServerEndpoints:
    """Test the FastAPI REST endpoints using Starlette TestClient."""

    @pytest.fixture(autouse=True)
    def _setup_test_client(self):
        """Create a TestClient — import is deferred to avoid requiring fastapi."""
        try:
            from starlette.testclient import TestClient
        except ImportError:
            pytest.skip("fastapi/starlette not installed")

        from hermes_cli.web_server import app
        self.client = TestClient(app)

    def test_get_status(self):
        resp = self.client.get("/api/status")
        assert resp.status_code == 200
        payload = resp.json()
        for field in ("version", "hermes_home", "active_sessions"):
            assert field in payload

    def test_get_status_filters_unconfigured_gateway_platforms(self, monkeypatch):
        import gateway.config as gateway_config
        import hermes_cli.web_server as web_server

        class _Platform:
            def __init__(self, value):
                self.value = value

        class _GatewayConfig:
            def get_connected_platforms(self):
                # Only telegram is actually configured.
                return [_Platform("telegram")]

        monkeypatch.setattr(web_server, "get_running_pid", lambda: 1234)
        monkeypatch.setattr(
            web_server,
            "read_runtime_status",
            lambda: {
                "gateway_state": "running",
                "updated_at": "2026-04-12T00:00:00+00:00",
                "platforms": {
                    "telegram": {"state": "connected", "updated_at": "2026-04-12T00:00:00+00:00"},
                    "whatsapp": {"state": "retrying", "updated_at": "2026-04-12T00:00:00+00:00"},
                    "feishu": {"state": "connected", "updated_at": "2026-04-12T00:00:00+00:00"},
                },
            },
        )
        monkeypatch.setattr(web_server, "check_config_version", lambda: (1, 1))
        monkeypatch.setattr(gateway_config, "load_gateway_config", lambda: _GatewayConfig())

        resp = self.client.get("/api/status")

        assert resp.status_code == 200
        # Unconfigured platforms (whatsapp, feishu) must be filtered out.
        assert resp.json()["gateway_platforms"] == {
            "telegram": {"state": "connected", "updated_at": "2026-04-12T00:00:00+00:00"},
        }

    def test_get_status_hides_stale_platforms_when_gateway_not_running(self, monkeypatch):
        import gateway.config as gateway_config
        import hermes_cli.web_server as web_server

        class _GatewayConfig:
            def get_connected_platforms(self):
                return []

        monkeypatch.setattr(web_server, "get_running_pid", lambda: None)
        monkeypatch.setattr(
            web_server,
            "read_runtime_status",
            lambda: {
                "gateway_state": "startup_failed",
                "updated_at": "2026-04-12T00:00:00+00:00",
                "platforms": {
                    "whatsapp": {"state": "retrying", "updated_at": "2026-04-12T00:00:00+00:00"},
                    "feishu": {"state": "connected", "updated_at": "2026-04-12T00:00:00+00:00"},
                },
            },
        )
        monkeypatch.setattr(web_server, "check_config_version", lambda: (1, 1))
        monkeypatch.setattr(gateway_config, "load_gateway_config", lambda: _GatewayConfig())

        resp = self.client.get("/api/status")

        assert resp.status_code == 200
        assert resp.json()["gateway_state"] == "startup_failed"
        # No running gateway => no platform entries should survive.
        assert resp.json()["gateway_platforms"] == {}

    def test_get_config_schema(self):
        resp = self.client.get("/api/config/schema")
        assert resp.status_code == 200
        payload = resp.json()
        assert "fields" in payload
        assert "category_order" in payload
        schema = payload["fields"]
        assert len(schema) > 100  # Should have 150+ fields
        assert "model" in schema
        # category_order must be a non-empty list containing "general".
        assert isinstance(payload["category_order"], list)
        assert len(payload["category_order"]) > 0
        assert "general" in payload["category_order"]

    def test_get_config_defaults(self):
        resp = self.client.get("/api/config/defaults")
        assert resp.status_code == 200
        assert "model" in resp.json()

    def test_get_env_vars(self):
        resp = self.client.get("/api/env")
        assert resp.status_code == 200
        payload = resp.json()
        # Should contain known env var names
        assert any(k.endswith("_API_KEY") or k.endswith("_TOKEN") for k in payload.keys())

    def test_reveal_env_var(self, tmp_path):
        """POST /api/env/reveal should return the real unredacted value."""
        from hermes_cli.config import save_env_value
        from hermes_cli.web_server import _SESSION_TOKEN
        save_env_value("TEST_REVEAL_KEY", "super-secret-value-12345")
        resp = self.client.post(
            "/api/env/reveal",
            json={"key": "TEST_REVEAL_KEY"},
            headers={"Authorization": f"Bearer {_SESSION_TOKEN}"},
        )
        assert resp.status_code == 200
        payload = resp.json()
        assert payload["key"] == "TEST_REVEAL_KEY"
        assert payload["value"] == "super-secret-value-12345"

    def test_reveal_env_var_not_found(self):
        """POST /api/env/reveal should 404 for unknown keys."""
        from hermes_cli.web_server import _SESSION_TOKEN
        resp = self.client.post(
            "/api/env/reveal",
            json={"key": "NONEXISTENT_KEY_XYZ"},
            headers={"Authorization": f"Bearer {_SESSION_TOKEN}"},
        )
        assert resp.status_code == 404

    def test_reveal_env_var_no_token(self, tmp_path):
        """POST /api/env/reveal without token should return 401."""
        from hermes_cli.config import save_env_value
        save_env_value("TEST_REVEAL_NOAUTH", "secret-value")
        resp = self.client.post(
            "/api/env/reveal",
            json={"key": "TEST_REVEAL_NOAUTH"},
        )
        assert resp.status_code == 401

    def test_reveal_env_var_bad_token(self, tmp_path):
        """POST /api/env/reveal with wrong token should return 401."""
        from hermes_cli.config import save_env_value
        save_env_value("TEST_REVEAL_BADAUTH", "secret-value")
        resp = self.client.post(
            "/api/env/reveal",
            json={"key": "TEST_REVEAL_BADAUTH"},
            headers={"Authorization": "Bearer wrong-token-here"},
        )
        assert resp.status_code == 401

    def test_session_token_endpoint(self):
        """GET /api/auth/session-token should return a token."""
        from hermes_cli.web_server import _SESSION_TOKEN
        resp = self.client.get("/api/auth/session-token")
        assert resp.status_code == 200
        assert resp.json()["token"] == _SESSION_TOKEN

    def test_path_traversal_blocked(self):
        """Verify URL-encoded path traversal is blocked."""
        # %2e%2e = ..
        resp = self.client.get("/%2e%2e/%2e%2e/etc/passwd")
        # Should return 200 with index.html (SPA fallback), not the actual file
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            # Should be the SPA fallback, not the system file
            assert "root:" not in resp.text

    def test_path_traversal_dotdot_blocked(self):
        """Direct .. path traversal via encoded sequences."""
        resp = self.client.get("/%2e%2e/hermes_cli/web_server.py")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            assert "FastAPI" not in resp.text  # Should not serve the actual source
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _build_schema_from_config tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestBuildSchemaFromConfig:
    """Tests for the schema derived from DEFAULT_CONFIG at import time."""

    def test_produces_expected_field_count(self):
        from hermes_cli.web_server import CONFIG_SCHEMA
        # DEFAULT_CONFIG has ~150+ leaf fields
        assert len(CONFIG_SCHEMA) > 100

    def test_schema_entries_have_required_fields(self):
        from hermes_cli.web_server import CONFIG_SCHEMA
        for key, entry in list(CONFIG_SCHEMA.items())[:10]:
            assert "type" in entry, f"Missing type for {key}"
            assert "category" in entry, f"Missing category for {key}"

    def test_overrides_applied(self):
        from hermes_cli.web_server import CONFIG_SCHEMA
        # terminal.backend should be a select with options
        if "terminal.backend" in CONFIG_SCHEMA:
            backend = CONFIG_SCHEMA["terminal.backend"]
            assert backend["type"] == "select"
            assert "options" in backend
            assert "local" in backend["options"]

    def test_empty_prefix_produces_correct_keys(self):
        from hermes_cli.web_server import _build_schema_from_config
        schema = _build_schema_from_config({"model": "test", "nested": {"key": "val"}})
        assert "model" in schema
        assert "nested.key" in schema

    def test_top_level_scalars_get_general_category(self):
        """Top-level scalar fields should be in 'general' category."""
        from hermes_cli.web_server import CONFIG_SCHEMA
        assert CONFIG_SCHEMA["model"]["category"] == "general"

    def test_nested_keys_get_parent_category(self):
        """Nested fields should use the top-level parent as their category."""
        from hermes_cli.web_server import CONFIG_SCHEMA
        if "agent.max_turns" in CONFIG_SCHEMA:
            assert CONFIG_SCHEMA["agent.max_turns"]["category"] == "agent"

    def test_category_merge_applied(self):
        """Small categories should be merged into larger ones."""
        from hermes_cli.web_server import CONFIG_SCHEMA
        categories = {e["category"] for e in CONFIG_SCHEMA.values()}
        # These should be merged away
        assert "privacy" not in categories  # merged into security
        assert "context" not in categories  # merged into agent

    def test_no_single_field_categories(self):
        """After merging, no category should have just 1 field."""
        from collections import Counter

        from hermes_cli.web_server import CONFIG_SCHEMA
        counts = Counter(e["category"] for e in CONFIG_SCHEMA.values())
        for cat, count in counts.items():
            assert count >= 2, f"Category '{cat}' has only {count} field(s) — should be merged"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Config round-trip tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestConfigRoundTrip:
    """Verify config survives GET → edit → PUT without data loss.

    Several tests here write to the real on-disk user config through the web
    API.  Each mutating test snapshots the relevant state up front and
    restores it in a ``finally`` block, so a failing assertion cannot leave
    the user's config permanently mutated (previously restoration only ran on
    the success path, and the round-trip test never restored at all).
    """

    @pytest.fixture(autouse=True)
    def _setup(self):
        # Skip cleanly when the optional web-server dependencies are absent.
        try:
            from starlette.testclient import TestClient
        except ImportError:
            pytest.skip("fastapi/starlette not installed")
        from hermes_cli.web_server import app
        self.client = TestClient(app)

    def test_get_config_no_internal_keys(self):
        """GET /api/config should not expose _config_version or _model_meta."""
        config = self.client.get("/api/config").json()
        internal = [k for k in config if k.startswith("_")]
        assert not internal, f"Internal keys leaked to frontend: {internal}"

    def test_get_config_model_is_string(self):
        """GET /api/config should normalize model dict to a string."""
        config = self.client.get("/api/config").json()
        assert isinstance(config.get("model"), str), \
            f"model should be string, got {type(config.get('model'))}"

    def test_round_trip_preserves_model_subkeys(self):
        """Save and reload should not lose model.provider, model.base_url, etc."""
        from hermes_cli.config import load_config, save_config

        # Snapshot the user's real config so we can put it back afterwards —
        # this test deliberately overwrites it below.
        original_config = load_config()
        try:
            # Set up a config with model as a dict (the common user config form)
            save_config({
                "model": {
                    "default": "anthropic/claude-sonnet-4",
                    "provider": "openrouter",
                    "base_url": "https://openrouter.ai/api/v1",
                    "api_mode": "openai",
                }
            })

            before = load_config()
            assert isinstance(before.get("model"), dict)
            original_keys = set(before["model"].keys())

            # GET → PUT unchanged
            web_config = self.client.get("/api/config").json()
            assert isinstance(web_config.get("model"), str), "GET should normalize model to string"

            self.client.put("/api/config", json={"config": web_config})

            after = load_config()
            assert isinstance(after.get("model"), dict), "model should still be a dict after save"
            assert set(after["model"].keys()) >= original_keys, \
                f"Lost model subkeys: {original_keys - set(after['model'].keys())}"
        finally:
            # Restore the real config even when an assertion above failed.
            save_config(original_config)

    def test_edit_model_name_preserved(self):
        """Changing the model string should update model.default on disk."""
        from hermes_cli.config import load_config

        web_config = self.client.get("/api/config").json()
        original_model = web_config["model"]
        try:
            # Change model
            web_config["model"] = "test/editing-model"
            self.client.put("/api/config", json={"config": web_config})

            after = load_config()
            if isinstance(after.get("model"), dict):
                assert after["model"]["default"] == "test/editing-model"
            else:
                assert after["model"] == "test/editing-model"
        finally:
            # Restore — runs even when the assertions above fail.
            web_config["model"] = original_model
            self.client.put("/api/config", json={"config": web_config})

    def test_edit_nested_value(self):
        """Editing a nested config value should persist correctly."""
        from hermes_cli.config import load_config

        web_config = self.client.get("/api/config").json()
        original_turns = web_config.get("agent", {}).get("max_turns")
        try:
            # Change max_turns
            web_config.setdefault("agent", {})["max_turns"] = 42
            self.client.put("/api/config", json={"config": web_config})

            after = load_config()
            assert after.get("agent", {}).get("max_turns") == 42
        finally:
            # Restore — runs even when the assertion above fails.
            web_config["agent"]["max_turns"] = original_turns
            self.client.put("/api/config", json={"config": web_config})

    def test_schema_types_match_config_values(self):
        """Every schema field should have a matching-type value in the config."""
        config = self.client.get("/api/config").json()
        schema_resp = self.client.get("/api/config/schema").json()
        schema = schema_resp["fields"]

        def get_nested(obj, path):
            # Walk a dotted path like "agent.max_turns"; None if any hop is
            # missing or the intermediate value is not a dict.
            cur = obj
            for p in path.split("."):
                if cur is None or not isinstance(cur, dict):
                    return None
                cur = cur.get(p)
            return cur

        mismatches = []
        for key, entry in schema.items():
            val = get_nested(config, key)
            if val is None:
                continue  # not set in user config — fine
            expected = entry["type"]
            if expected in ("string", "select") and not isinstance(val, str):
                mismatches.append(f"{key}: expected str, got {type(val).__name__}")
            elif expected == "number" and not isinstance(val, (int, float)):
                mismatches.append(f"{key}: expected number, got {type(val).__name__}")
            elif expected == "boolean" and not isinstance(val, bool):
                mismatches.append(f"{key}: expected bool, got {type(val).__name__}")
            elif expected == "list" and not isinstance(val, list):
                mismatches.append(f"{key}: expected list, got {type(val).__name__}")
        # Plain string here: the original used an f-string with no placeholders.
        assert not mismatches, "Type mismatches:\n" + "\n".join(mismatches)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# New feature endpoint tests
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestNewEndpoints:
    """Tests for session detail, logs, cron, skills, tools, raw config, analytics.

    ``test_config_raw_put_valid`` previously overwrote the user's real config
    file and never restored it; it now snapshots the raw YAML first and
    restores it in a ``finally`` block.
    """

    @pytest.fixture(autouse=True)
    def _setup(self):
        # Skip cleanly when the optional web-server dependencies are absent.
        try:
            from starlette.testclient import TestClient
        except ImportError:
            pytest.skip("fastapi/starlette not installed")
        from hermes_cli.web_server import app
        self.client = TestClient(app)

    def test_get_logs_default(self):
        """GET /api/logs returns the default log file with a list of lines."""
        resp = self.client.get("/api/logs")
        assert resp.status_code == 200
        data = resp.json()
        assert "file" in data
        assert "lines" in data
        assert isinstance(data["lines"], list)

    def test_get_logs_invalid_file(self):
        """Requesting an unknown log file is rejected with 400."""
        resp = self.client.get("/api/logs?file=nonexistent")
        assert resp.status_code == 400

    def test_cron_list(self):
        """GET /api/cron/jobs returns a JSON list."""
        resp = self.client.get("/api/cron/jobs")
        assert resp.status_code == 200
        assert isinstance(resp.json(), list)

    def test_cron_job_not_found(self):
        """An unknown cron job id yields 404."""
        resp = self.client.get("/api/cron/jobs/nonexistent-id")
        assert resp.status_code == 404

    def test_skills_list(self):
        """GET /api/skills returns a list of skill dicts with name/enabled."""
        resp = self.client.get("/api/skills")
        assert resp.status_code == 200
        skills = resp.json()
        assert isinstance(skills, list)
        if skills:
            assert "name" in skills[0]
            assert "enabled" in skills[0]

    def test_skills_list_includes_disabled_skills(self, monkeypatch):
        """Disabled skills appear in the listing with enabled=False."""
        import tools.skills_tool as skills_tool
        import hermes_cli.skills_config as skills_config
        import hermes_cli.web_server as web_server

        def _fake_find_all_skills(*, skip_disabled=False):
            # With skip_disabled=True the endpoint should see BOTH skills;
            # the disabled one is then flagged via get_disabled_skills.
            if skip_disabled:
                return [
                    {"name": "active-skill", "description": "active", "category": "demo"},
                    {"name": "disabled-skill", "description": "disabled", "category": "demo"},
                ]
            return [
                {"name": "active-skill", "description": "active", "category": "demo"},
            ]

        monkeypatch.setattr(skills_tool, "_find_all_skills", _fake_find_all_skills)
        monkeypatch.setattr(skills_config, "get_disabled_skills", lambda config: {"disabled-skill"})
        monkeypatch.setattr(web_server, "load_config", lambda: {"skills": {"disabled": ["disabled-skill"]}})

        resp = self.client.get("/api/skills")

        assert resp.status_code == 200
        assert resp.json() == [
            {
                "name": "active-skill",
                "description": "active",
                "category": "demo",
                "enabled": True,
            },
            {
                "name": "disabled-skill",
                "description": "disabled",
                "category": "demo",
                "enabled": False,
            },
        ]

    def test_toolsets_list(self):
        """GET /api/tools/toolsets returns a list of toolset summaries."""
        resp = self.client.get("/api/tools/toolsets")
        assert resp.status_code == 200
        toolsets = resp.json()
        assert isinstance(toolsets, list)
        if toolsets:
            assert "name" in toolsets[0]
            assert "label" in toolsets[0]
            assert "enabled" in toolsets[0]

    def test_toolsets_list_matches_cli_enabled_state(self, monkeypatch):
        """The web toolset listing mirrors the CLI's enabled/available/configured state."""
        import hermes_cli.tools_config as tools_config
        import toolsets as toolsets_module
        import hermes_cli.web_server as web_server

        monkeypatch.setattr(
            tools_config,
            "_get_effective_configurable_toolsets",
            lambda: [
                ("web", "🔍 Web Search & Scraping", "web_search, web_extract"),
                ("skills", "📚 Skills", "list, view, manage"),
                ("memory", "💾 Memory", "persistent memory across sessions"),
            ],
        )
        monkeypatch.setattr(
            tools_config,
            "_get_platform_tools",
            lambda config, platform, include_default_mcp_servers=False: {"web", "skills"},
        )
        monkeypatch.setattr(
            tools_config,
            "_toolset_has_keys",
            lambda ts_key, config=None: ts_key != "web",
        )
        monkeypatch.setattr(
            toolsets_module,
            "resolve_toolset",
            lambda name: {
                "web": ["web_search", "web_extract"],
                "skills": ["skills_list", "skill_view"],
                "memory": ["memory_read"],
            }[name],
        )
        monkeypatch.setattr(web_server, "load_config", lambda: {"platform_toolsets": {"cli": ["web", "skills"]}})

        resp = self.client.get("/api/tools/toolsets")

        assert resp.status_code == 200
        assert resp.json() == [
            {
                "name": "web",
                "label": "🔍 Web Search & Scraping",
                "description": "web_search, web_extract",
                "enabled": True,
                "available": True,
                "configured": False,
                "tools": ["web_extract", "web_search"],
            },
            {
                "name": "skills",
                "label": "📚 Skills",
                "description": "list, view, manage",
                "enabled": True,
                "available": True,
                "configured": True,
                "tools": ["skill_view", "skills_list"],
            },
            {
                "name": "memory",
                "label": "💾 Memory",
                "description": "persistent memory across sessions",
                "enabled": False,
                "available": False,
                "configured": True,
                "tools": ["memory_read"],
            },
        ]

    def test_config_raw_get(self):
        """GET /api/config/raw exposes the config as a YAML string."""
        resp = self.client.get("/api/config/raw")
        assert resp.status_code == 200
        assert "yaml" in resp.json()

    def test_config_raw_put_valid(self):
        """A valid YAML mapping is accepted; the user's config is restored after."""
        # Snapshot the current raw YAML: the PUT below overwrites the real
        # on-disk config, and the original test never put it back.
        original_yaml = self.client.get("/api/config/raw").json()["yaml"]
        try:
            resp = self.client.put(
                "/api/config/raw",
                json={"yaml_text": "model: test\ntoolsets:\n - all\n"},
            )
            assert resp.status_code == 200
            assert resp.json()["ok"] is True
        finally:
            # Best-effort restore; an empty snapshot would itself be rejected
            # by the endpoint (non-mapping YAML), so only restore real content.
            if original_yaml and original_yaml.strip():
                self.client.put("/api/config/raw", json={"yaml_text": original_yaml})

    def test_config_raw_put_invalid(self):
        """Non-mapping YAML is rejected with 400 and leaves the config alone."""
        resp = self.client.put(
            "/api/config/raw",
            json={"yaml_text": "- this is a list not a dict"},
        )
        assert resp.status_code == 400

    def test_analytics_usage(self):
        """GET /api/analytics/usage returns daily/by_model/totals aggregates."""
        resp = self.client.get("/api/analytics/usage?days=7")
        assert resp.status_code == 200
        data = resp.json()
        assert "daily" in data
        assert "by_model" in data
        assert "totals" in data
        assert isinstance(data["daily"], list)
        assert "total_sessions" in data["totals"]

    def test_session_token_endpoint(self):
        """GET /api/auth/session-token returns the server's session token."""
        from hermes_cli.web_server import _SESSION_TOKEN
        resp = self.client.get("/api/auth/session-token")
        assert resp.status_code == 200
        assert resp.json()["token"] == _SESSION_TOKEN
|
||||
Loading…
Add table
Add a link
Reference in a new issue