mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-30 01:41:43 +00:00
Merge remote-tracking branch 'origin/main' into hermes/hermes-1f7bfa9e
# Conflicts: # cron/scheduler.py # tools/send_message_tool.py
This commit is contained in:
commit
e7fc6450fc
99 changed files with 9609 additions and 1075 deletions
399
hermes_cli/backup.py
Normal file
399
hermes_cli/backup.py
Normal file
|
|
@ -0,0 +1,399 @@
|
|||
"""
|
||||
Backup and import commands for hermes CLI.
|
||||
|
||||
`hermes backup` creates a zip archive of the entire ~/.hermes/ directory
|
||||
(excluding the hermes-agent repo and transient files).
|
||||
|
||||
`hermes import` restores from a backup zip, overlaying onto the current
|
||||
HERMES_HOME root.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import zipfile
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
|
||||
from hermes_constants import get_default_hermes_root, display_hermes_home
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Exclusion rules
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Directory names to skip entirely (matched against each path component)
|
||||
_EXCLUDED_DIRS = {
|
||||
"hermes-agent", # the codebase repo — re-clone instead
|
||||
"__pycache__", # bytecode caches — regenerated on import
|
||||
".git", # nested git dirs (profiles shouldn't have these, but safety)
|
||||
"node_modules", # js deps if website/ somehow leaks in
|
||||
}
|
||||
|
||||
# File-name suffixes to skip
|
||||
_EXCLUDED_SUFFIXES = (
|
||||
".pyc",
|
||||
".pyo",
|
||||
)
|
||||
|
||||
# File names to skip (runtime state that's meaningless on another machine)
|
||||
_EXCLUDED_NAMES = {
|
||||
"gateway.pid",
|
||||
"cron.pid",
|
||||
}
|
||||
|
||||
|
||||
def _should_exclude(rel_path: Path) -> bool:
|
||||
"""Return True if *rel_path* (relative to hermes root) should be skipped."""
|
||||
parts = rel_path.parts
|
||||
|
||||
# Any path component matches an excluded dir name
|
||||
for part in parts:
|
||||
if part in _EXCLUDED_DIRS:
|
||||
return True
|
||||
|
||||
name = rel_path.name
|
||||
|
||||
if name in _EXCLUDED_NAMES:
|
||||
return True
|
||||
|
||||
if name.endswith(_EXCLUDED_SUFFIXES):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Backup
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _format_size(nbytes: int) -> str:
|
||||
"""Human-readable file size."""
|
||||
for unit in ("B", "KB", "MB", "GB"):
|
||||
if nbytes < 1024:
|
||||
return f"{nbytes:.1f} {unit}" if unit != "B" else f"{nbytes} {unit}"
|
||||
nbytes /= 1024
|
||||
return f"{nbytes:.1f} TB"
|
||||
|
||||
|
||||
def run_backup(args) -> None:
    """Create a zip backup of the Hermes home directory.

    Walks HERMES_HOME (without following symlinks), skips excluded
    directories/files (see _should_exclude), and writes everything else into
    a deflate-compressed zip. Prints a summary of counts, sizes, elapsed
    time, excluded directories, and any per-file errors.

    Args:
        args: parsed CLI namespace; only ``args.output`` is read — an
            optional destination file or directory for the zip.
            TODO confirm other attributes are unused by callers.
    """
    hermes_root = get_default_hermes_root()

    if not hermes_root.is_dir():
        print(f"Error: Hermes home directory not found at {hermes_root}")
        sys.exit(1)

    # Determine output path
    if args.output:
        out_path = Path(args.output).expanduser().resolve()
        # If user gave a directory, put the zip inside it
        if out_path.is_dir():
            stamp = datetime.now().strftime("%Y-%m-%d-%H%M%S")
            out_path = out_path / f"hermes-backup-{stamp}.zip"
    else:
        # No output given: timestamped zip in the user's home directory.
        stamp = datetime.now().strftime("%Y-%m-%d-%H%M%S")
        out_path = Path.home() / f"hermes-backup-{stamp}.zip"

    # Ensure the suffix is .zip
    # (appends rather than replaces, so "x.tar" becomes "x.tar.zip")
    if out_path.suffix.lower() != ".zip":
        out_path = out_path.with_suffix(out_path.suffix + ".zip")

    # Ensure parent directory exists
    out_path.parent.mkdir(parents=True, exist_ok=True)

    # Collect files
    print(f"Scanning {display_hermes_home()} ...")
    files_to_add: list[tuple[Path, Path]] = []  # (absolute, relative)
    skipped_dirs = set()

    for dirpath, dirnames, filenames in os.walk(hermes_root, followlinks=False):
        dp = Path(dirpath)
        rel_dir = dp.relative_to(hermes_root)

        # Prune excluded directories in-place so os.walk doesn't descend
        orig_dirnames = dirnames[:]
        dirnames[:] = [
            d for d in dirnames
            if d not in _EXCLUDED_DIRS
        ]
        # Record what was pruned so the summary can report it.
        for removed in set(orig_dirnames) - set(dirnames):
            skipped_dirs.add(str(rel_dir / removed))

        for fname in filenames:
            fpath = dp / fname
            rel = fpath.relative_to(hermes_root)

            if _should_exclude(rel):
                continue

            # Skip the output zip itself if it happens to be inside hermes root
            try:
                if fpath.resolve() == out_path.resolve():
                    continue
            except (OSError, ValueError):
                # resolve() can fail on broken links/odd paths; include the file.
                pass

            files_to_add.append((fpath, rel))

    if not files_to_add:
        print("No files to back up.")
        return

    # Create the zip
    file_count = len(files_to_add)
    print(f"Backing up {file_count} files ...")

    total_bytes = 0
    errors = []
    t0 = time.monotonic()

    with zipfile.ZipFile(out_path, "w", zipfile.ZIP_DEFLATED, compresslevel=6) as zf:
        for i, (abs_path, rel_path) in enumerate(files_to_add, 1):
            try:
                zf.write(abs_path, arcname=str(rel_path))
                # stat() after the write; counts the uncompressed size.
                total_bytes += abs_path.stat().st_size
            except (PermissionError, OSError) as exc:
                # Best-effort backup: record and keep going.
                errors.append(f" {rel_path}: {exc}")
                continue

            # Progress every 500 files
            if i % 500 == 0:
                print(f" {i}/{file_count} files ...")

    elapsed = time.monotonic() - t0
    zip_size = out_path.stat().st_size

    # Summary
    print()
    print(f"Backup complete: {out_path}")
    print(f" Files: {file_count}")
    print(f" Original: {_format_size(total_bytes)}")
    print(f" Compressed: {_format_size(zip_size)}")
    print(f" Time: {elapsed:.1f}s")

    if skipped_dirs:
        print(f"\n Excluded directories:")
        for d in sorted(skipped_dirs):
            print(f" {d}/")

    if errors:
        # Cap the error listing at 10 lines to keep the summary readable.
        print(f"\n Warnings ({len(errors)} files skipped):")
        for e in errors[:10]:
            print(e)
        if len(errors) > 10:
            print(f" ... and {len(errors) - 10} more")

    print(f"\nRestore with: hermes import {out_path.name}")
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Import
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
def _validate_backup_zip(zf: zipfile.ZipFile) -> tuple[bool, str]:
|
||||
"""Check that a zip looks like a Hermes backup.
|
||||
|
||||
Returns (ok, reason).
|
||||
"""
|
||||
names = zf.namelist()
|
||||
if not names:
|
||||
return False, "zip archive is empty"
|
||||
|
||||
# Look for telltale files that a hermes home would have
|
||||
markers = {"config.yaml", ".env", "hermes_state.db", "memory_store.db"}
|
||||
found = set()
|
||||
for n in names:
|
||||
# Could be at the root or one level deep (if someone zipped the directory)
|
||||
basename = Path(n).name
|
||||
if basename in markers:
|
||||
found.add(basename)
|
||||
|
||||
if not found:
|
||||
return False, (
|
||||
"zip does not appear to be a Hermes backup "
|
||||
"(no config.yaml, .env, or state databases found)"
|
||||
)
|
||||
|
||||
return True, ""
|
||||
|
||||
|
||||
def _detect_prefix(zf: zipfile.ZipFile) -> str:
|
||||
"""Detect if the zip has a common directory prefix wrapping all entries.
|
||||
|
||||
Some tools zip as `.hermes/config.yaml` instead of `config.yaml`.
|
||||
Returns the prefix to strip (empty string if none).
|
||||
"""
|
||||
names = [n for n in zf.namelist() if not n.endswith("/")]
|
||||
if not names:
|
||||
return ""
|
||||
|
||||
# Find common prefix
|
||||
parts_list = [Path(n).parts for n in names]
|
||||
|
||||
# Check if all entries share a common first directory
|
||||
first_parts = {p[0] for p in parts_list if len(p) > 1}
|
||||
if len(first_parts) == 1:
|
||||
prefix = first_parts.pop()
|
||||
# Only strip if it looks like a hermes dir name
|
||||
if prefix in (".hermes", "hermes"):
|
||||
return prefix + "/"
|
||||
|
||||
return ""
|
||||
|
||||
|
||||
def run_import(args) -> None:
    """Restore a Hermes backup from a zip file.

    Validates the archive, strips a wrapping ``.hermes/``/``hermes/`` prefix
    if present, asks for confirmation before overwriting an existing
    installation (unless ``args.force``), then extracts every member under
    HERMES_HOME with path-traversal protection. Afterwards it attempts to
    re-create profile wrapper scripts and prints follow-up guidance.

    Args:
        args: parsed CLI namespace; reads ``args.zipfile`` (path to the
            backup zip) and ``args.force`` (skip the overwrite prompt).
    """
    zip_path = Path(args.zipfile).expanduser().resolve()

    if not zip_path.is_file():
        print(f"Error: File not found: {zip_path}")
        sys.exit(1)

    if not zipfile.is_zipfile(zip_path):
        print(f"Error: Not a valid zip file: {zip_path}")
        sys.exit(1)

    hermes_root = get_default_hermes_root()

    with zipfile.ZipFile(zip_path, "r") as zf:
        # Validate
        ok, reason = _validate_backup_zip(zf)
        if not ok:
            print(f"Error: {reason}")
            sys.exit(1)

        prefix = _detect_prefix(zf)
        # Directory entries (trailing slash) are skipped; parents are created
        # on demand during extraction below.
        members = [n for n in zf.namelist() if not n.endswith("/")]
        file_count = len(members)

        print(f"Backup contains {file_count} files")
        print(f"Target: {display_hermes_home()}")

        if prefix:
            print(f"Detected archive prefix: {prefix!r} (will be stripped)")

        # Check for existing installation
        has_config = (hermes_root / "config.yaml").exists()
        has_env = (hermes_root / ".env").exists()

        if (has_config or has_env) and not args.force:
            print()
            print("Warning: Target directory already has Hermes configuration.")
            print("Importing will overwrite existing files with backup contents.")
            print()
            try:
                answer = input("Continue? [y/N] ").strip().lower()
            except (EOFError, KeyboardInterrupt):
                # Non-interactive stdin or Ctrl-C: treat as refusal.
                print("\nAborted.")
                sys.exit(1)
            if answer not in ("y", "yes"):
                print("Aborted.")
                return

        # Extract
        print(f"\nImporting {file_count} files ...")
        hermes_root.mkdir(parents=True, exist_ok=True)

        errors = []
        restored = 0
        t0 = time.monotonic()

        for member in members:
            # Strip prefix if detected
            if prefix and member.startswith(prefix):
                rel = member[len(prefix):]
            else:
                rel = member

            if not rel:
                continue

            target = hermes_root / rel

            # Security: reject absolute paths and traversals
            # (relative_to raises ValueError when target escapes hermes_root)
            try:
                target.resolve().relative_to(hermes_root.resolve())
            except ValueError:
                errors.append(f" {rel}: path traversal blocked")
                continue

            try:
                target.parent.mkdir(parents=True, exist_ok=True)
                # Reads the whole member into memory before writing it out.
                with zf.open(member) as src, open(target, "wb") as dst:
                    dst.write(src.read())
                restored += 1
            except (PermissionError, OSError) as exc:
                errors.append(f" {rel}: {exc}")

            # NOTE(review): while restored == 0 (e.g. the first files all
            # fail), 0 % 500 == 0 so this prints every iteration — confirm
            # whether `restored` here was meant to be a loop counter.
            if restored % 500 == 0:
                print(f" {restored}/{file_count} files ...")

    elapsed = time.monotonic() - t0

    # Summary
    print()
    print(f"Import complete: {restored} files restored in {elapsed:.1f}s")
    print(f" Target: {display_hermes_home()}")

    if errors:
        # Cap the error listing at 10 lines to keep the summary readable.
        print(f"\n Warnings ({len(errors)} files skipped):")
        for e in errors[:10]:
            print(e)
        if len(errors) > 10:
            print(f" ... and {len(errors) - 10} more")

    # Post-import: restore profile wrapper scripts
    profiles_dir = hermes_root / "profiles"
    restored_profiles = []
    if profiles_dir.is_dir():
        try:
            from hermes_cli.profiles import (
                create_wrapper_script, check_alias_collision,
                _is_wrapper_dir_in_path, _get_wrapper_dir,
            )
            for entry in sorted(profiles_dir.iterdir()):
                if not entry.is_dir():
                    continue
                profile_name = entry.name
                # Only create wrappers for directories with config
                if not (entry / "config.yaml").exists() and not (entry / ".env").exists():
                    continue
                collision = check_alias_collision(profile_name)
                if collision:
                    print(f" Skipped alias '{profile_name}': {collision}")
                    restored_profiles.append((profile_name, False))
                else:
                    # (name, created_ok) — create_wrapper_script returning
                    # None is treated as a skip.
                    wrapper = create_wrapper_script(profile_name)
                    restored_profiles.append((profile_name, wrapper is not None))

            if restored_profiles:
                created = [n for n, ok in restored_profiles if ok]
                skipped = [n for n, ok in restored_profiles if not ok]
                if created:
                    print(f"\n Profile aliases restored: {', '.join(created)}")
                if skipped:
                    print(f" Profile aliases skipped: {', '.join(skipped)}")
                if not _is_wrapper_dir_in_path():
                    print(f"\n Note: {_get_wrapper_dir()} is not in your PATH.")
                    print(' Add to your shell config (~/.bashrc or ~/.zshrc):')
                    print(' export PATH="$HOME/.local/bin:$PATH"')
        except ImportError:
            # hermes_cli.profiles might not be available (fresh install)
            if any(profiles_dir.iterdir()):
                print(f"\n Profiles detected but aliases could not be created.")
                print(f" Run: hermes profile list (after installing hermes)")

    # Guidance
    print()
    if not (hermes_root / "hermes-agent").is_dir():
        # The repo is deliberately excluded from backups (see _EXCLUDED_DIRS).
        print("Note: The hermes-agent codebase was not included in the backup.")
        print(" If this is a fresh install, run: hermes update")

    if restored_profiles:
        gw_profiles = [n for n, _ in restored_profiles]
        print("\nTo re-enable gateway services for profiles:")
        for pname in gw_profiles:
            print(f" hermes -p {pname} gateway install")

    print("Done. Your Hermes configuration has been restored.")
|
||||
|
|
@ -52,6 +52,41 @@ _OPENCLAW_SCRIPT_INSTALLED = (
|
|||
# Known OpenClaw directory names (current + legacy)
|
||||
_OPENCLAW_DIR_NAMES = (".openclaw", ".clawdbot", ".moldbot")
|
||||
|
||||
def _warn_if_gateway_running(auto_yes: bool) -> None:
    """Warn (and optionally abort) when a live gateway has connected platforms.

    Migrating bot tokens while the gateway is polling will cause conflicts
    (e.g. Telegram 409 "terminated by other getUpdates request"). Warn the
    user and let them decide whether to continue.
    """
    from gateway.status import get_running_pid, read_runtime_status

    if not get_running_pid():
        return  # no gateway process at all — nothing to warn about

    status = read_runtime_status() or {}
    platform_map = status.get("platforms") or {}

    # Collect the names of platforms currently in the "connected" state.
    active = []
    for platform_name, info in platform_map.items():
        if isinstance(info, dict) and info.get("state") == "connected":
            active.append(platform_name)

    if not active:
        return

    joined = ", ".join(active)
    print()
    print_error(f"Hermes gateway is running with active connections: {joined}")
    print_info(
        "Migrating bot tokens while the gateway is active will cause "
        "conflicts (Telegram, Discord, and Slack only allow one active "
        "session per token)."
    )
    print_info("Recommendation: stop the gateway first with 'hermes stop'.")
    print()

    if auto_yes:
        return
    if prompt_yes_no("Continue anyway?", default=False):
        return

    print_info("Migration cancelled. Stop the gateway and try again.")
    sys.exit(0)
|
||||
|
||||
# State files commonly found in OpenClaw workspace directories that cause
|
||||
# confusion after migration (the agent discovers them and writes to them)
|
||||
_WORKSPACE_STATE_GLOBS = (
|
||||
|
|
@ -252,6 +287,10 @@ def _cmd_migrate(args):
|
|||
print_info(f"Workspace: {workspace_target}")
|
||||
print()
|
||||
|
||||
# Check if a gateway is running with connected platforms — migrating tokens
|
||||
# while the gateway is active will cause conflicts (e.g. Telegram 409).
|
||||
_warn_if_gateway_running(auto_yes)
|
||||
|
||||
# Ensure config.yaml exists before migration tries to read it
|
||||
config_path = get_config_path()
|
||||
if not config_path.exists():
|
||||
|
|
|
|||
|
|
@ -38,6 +38,9 @@ _EXTRA_ENV_KEYS = frozenset({
|
|||
"DINGTALK_CLIENT_ID", "DINGTALK_CLIENT_SECRET",
|
||||
"FEISHU_APP_ID", "FEISHU_APP_SECRET", "FEISHU_ENCRYPT_KEY", "FEISHU_VERIFICATION_TOKEN",
|
||||
"WECOM_BOT_ID", "WECOM_SECRET",
|
||||
"WECOM_CALLBACK_CORP_ID", "WECOM_CALLBACK_CORP_SECRET", "WECOM_CALLBACK_AGENT_ID",
|
||||
"WECOM_CALLBACK_TOKEN", "WECOM_CALLBACK_ENCODING_AES_KEY",
|
||||
"WECOM_CALLBACK_HOST", "WECOM_CALLBACK_PORT",
|
||||
"WEIXIN_ACCOUNT_ID", "WEIXIN_TOKEN", "WEIXIN_BASE_URL", "WEIXIN_CDN_BASE_URL",
|
||||
"WEIXIN_HOME_CHANNEL", "WEIXIN_HOME_CHANNEL_NAME", "WEIXIN_DM_POLICY", "WEIXIN_GROUP_POLICY",
|
||||
"WEIXIN_ALLOWED_USERS", "WEIXIN_GROUP_ALLOWED_USERS", "WEIXIN_ALLOW_ALL_USERS",
|
||||
|
|
@ -142,6 +145,73 @@ def managed_error(action: str = "modify configuration"):
|
|||
print(format_managed_message(action), file=sys.stderr)
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Container-aware CLI (NixOS container mode)
|
||||
# =============================================================================
|
||||
|
||||
def _is_inside_container() -> bool:
|
||||
"""Detect if we're already running inside a Docker/Podman container."""
|
||||
# Standard Docker/Podman indicators
|
||||
if os.path.exists("/.dockerenv"):
|
||||
return True
|
||||
# Podman uses /run/.containerenv
|
||||
if os.path.exists("/run/.containerenv"):
|
||||
return True
|
||||
# Check cgroup for container runtime evidence (works for both Docker & Podman)
|
||||
try:
|
||||
with open("/proc/1/cgroup", "r") as f:
|
||||
cgroup = f.read()
|
||||
if "docker" in cgroup or "podman" in cgroup or "/lxc/" in cgroup:
|
||||
return True
|
||||
except OSError:
|
||||
pass
|
||||
return False
|
||||
|
||||
|
||||
def get_container_exec_info() -> Optional[dict]:
    """Read container mode metadata from HERMES_HOME/.container-mode.

    Returns a dict with keys: backend, container_name, exec_user, hermes_bin
    or None if container mode is not active, we're already inside the
    container, or HERMES_DEV=1 is set.

    The .container-mode file is written by the NixOS activation script when
    container.enable = true. It tells the host CLI to exec into the container
    instead of running locally.
    """
    # Developer escape hatch: never redirect into the container in dev mode.
    if os.environ.get("HERMES_DEV") == "1":
        return None

    # Already inside the container — run locally.
    if _is_inside_container():
        return None

    marker_path = get_hermes_home() / ".container-mode"

    parsed: dict = {}
    try:
        with open(marker_path, "r") as fh:
            for raw in fh:
                entry = raw.strip()
                # Simple KEY=VALUE lines; '#' lines are comments.
                if entry.startswith("#") or "=" not in entry:
                    continue
                key, _, value = entry.partition("=")
                parsed[key.strip()] = value.strip()
    except FileNotFoundError:
        return None  # container mode not enabled on this host
    # All other exceptions (PermissionError, malformed data, etc.) propagate

    defaults = {
        "backend": "docker",
        "container_name": "hermes-agent",
        "exec_user": "hermes",
        "hermes_bin": "/data/current-package/bin/hermes",
    }
    return {key: parsed.get(key, fallback) for key, fallback in defaults.items()}
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Config paths
|
||||
# =============================================================================
|
||||
|
|
@ -447,9 +517,11 @@ DEFAULT_CONFIG = {
|
|||
"inline_diffs": True, # Show inline diff previews for write actions (write_file, patch, skill_manage)
|
||||
"show_cost": False, # Show $ cost in the status bar (off by default)
|
||||
"skin": "default",
|
||||
"interim_assistant_messages": True, # Gateway: show natural mid-turn assistant status messages
|
||||
"tool_progress_command": False, # Enable /verbose command in messaging gateway
|
||||
"tool_progress_overrides": {}, # Per-platform overrides: {"signal": "off", "telegram": "all"}
|
||||
"tool_progress_overrides": {}, # DEPRECATED — use display.platforms instead
|
||||
"tool_preview_length": 0, # Max chars for tool call previews (0 = no limit, show full paths/commands)
|
||||
"platforms": {}, # Per-platform display overrides: {"telegram": {"tool_progress": "all"}, "slack": {"tool_progress": "off"}}
|
||||
},
|
||||
|
||||
# Privacy settings
|
||||
|
|
@ -637,7 +709,7 @@ DEFAULT_CONFIG = {
|
|||
},
|
||||
|
||||
# Config schema version - bump this when adding new required fields
|
||||
"_config_version": 14,
|
||||
"_config_version": 16,
|
||||
}
|
||||
|
||||
# =============================================================================
|
||||
|
|
@ -1901,6 +1973,44 @@ def migrate_config(interactive: bool = True, quiet: bool = False) -> Dict[str, A
|
|||
if not quiet:
|
||||
print(f" ✓ Migrated legacy stt.model to provider-specific config")
|
||||
|
||||
# ── Version 14 → 15: add explicit gateway interim-message gate ──
|
||||
if current_ver < 15:
|
||||
config = read_raw_config()
|
||||
display = config.get("display", {})
|
||||
if not isinstance(display, dict):
|
||||
display = {}
|
||||
if "interim_assistant_messages" not in display:
|
||||
display["interim_assistant_messages"] = True
|
||||
config["display"] = display
|
||||
results["config_added"].append("display.interim_assistant_messages=true (default)")
|
||||
save_config(config)
|
||||
if not quiet:
|
||||
print(" ✓ Added display.interim_assistant_messages=true")
|
||||
|
||||
# ── Version 15 → 16: migrate tool_progress_overrides into display.platforms ──
|
||||
if current_ver < 16:
|
||||
config = read_raw_config()
|
||||
display = config.get("display", {})
|
||||
if not isinstance(display, dict):
|
||||
display = {}
|
||||
old_overrides = display.get("tool_progress_overrides")
|
||||
if isinstance(old_overrides, dict) and old_overrides:
|
||||
platforms = display.get("platforms", {})
|
||||
if not isinstance(platforms, dict):
|
||||
platforms = {}
|
||||
for plat, mode in old_overrides.items():
|
||||
if plat not in platforms:
|
||||
platforms[plat] = {}
|
||||
if "tool_progress" not in platforms[plat]:
|
||||
platforms[plat]["tool_progress"] = mode
|
||||
display["platforms"] = platforms
|
||||
config["display"] = display
|
||||
save_config(config)
|
||||
if not quiet:
|
||||
migrated = ", ".join(f"{p}={m}" for p, m in old_overrides.items())
|
||||
print(f" ✓ Migrated tool_progress_overrides → display.platforms: {migrated}")
|
||||
results["config_added"].append("display.platforms (migrated from tool_progress_overrides)")
|
||||
|
||||
if current_ver < latest_ver and not quiet:
|
||||
print(f"Config version: {current_ver} → {latest_ver}")
|
||||
|
||||
|
|
|
|||
|
|
@ -287,6 +287,129 @@ def _radio_numbered_fallback(
|
|||
return cancel_returns
|
||||
|
||||
|
||||
def curses_single_select(
    title: str,
    items: List[str],
    default_index: int = 0,
    *,
    cancel_label: str = "Cancel",
) -> int | None:
    """Curses single-select menu. Returns selected index or None on cancel.

    Works inside prompt_toolkit because curses.wrapper() restores the terminal
    safely, unlike simple_term_menu which conflicts with /dev/tty.

    Args:
        title: heading shown at the top of the screen.
        items: selectable labels; a cancel entry is appended automatically.
        default_index: row the cursor starts on (clamped to the list).
        cancel_label: label of the appended cancel entry.

    Returns:
        The selected index into *items*, or None when the user cancels
        (ESC, 'q', choosing the cancel entry, or no TTY available).
    """
    if not sys.stdin.isatty():
        return None  # non-interactive session: cannot run a curses UI

    try:
        import curses
        # The selection is smuggled out of the curses callback through this
        # one-element mutable holder (curses.wrapper returns _draw's result
        # only indirectly).
        result_holder: list = [None]

        # Append the cancel row; its index doubles as the cancel sentinel.
        all_items = list(items) + [cancel_label]
        cancel_idx = len(items)

        def _draw(stdscr):
            curses.curs_set(0)  # hide the hardware cursor
            if curses.has_colors():
                curses.start_color()
                curses.use_default_colors()
                curses.init_pair(1, curses.COLOR_GREEN, -1)   # selected row
                curses.init_pair(2, curses.COLOR_YELLOW, -1)  # title
            cursor = min(default_index, len(all_items) - 1)
            scroll_offset = 0

            while True:
                stdscr.clear()
                max_y, max_x = stdscr.getmaxyx()

                # Header: title plus key hints. addnstr can raise on tiny
                # terminals, which we tolerate.
                try:
                    hattr = curses.A_BOLD
                    if curses.has_colors():
                        hattr |= curses.color_pair(2)
                    stdscr.addnstr(0, 0, title, max_x - 1, hattr)
                    stdscr.addnstr(
                        1, 0,
                        " ↑↓ navigate ENTER confirm ESC/q cancel",
                        max_x - 1, curses.A_DIM,
                    )
                except curses.error:
                    pass

                # Keep the cursor row inside the visible window by adjusting
                # the scroll offset before drawing.
                visible_rows = max_y - 3
                if cursor < scroll_offset:
                    scroll_offset = cursor
                elif cursor >= scroll_offset + visible_rows:
                    scroll_offset = cursor - visible_rows + 1

                for draw_i, i in enumerate(
                    range(scroll_offset, min(len(all_items), scroll_offset + visible_rows))
                ):
                    y = draw_i + 3  # rows 0-2 are header/hints
                    if y >= max_y - 1:
                        break
                    arrow = "→" if i == cursor else " "
                    line = f" {arrow} {all_items[i]}"
                    attr = curses.A_NORMAL
                    if i == cursor:
                        attr = curses.A_BOLD
                        if curses.has_colors():
                            attr |= curses.color_pair(1)
                    try:
                        stdscr.addnstr(y, 0, line, max_x - 1, attr)
                    except curses.error:
                        pass

                stdscr.refresh()
                key = stdscr.getch()

                # Navigation wraps around at both ends (modulo arithmetic).
                if key in (curses.KEY_UP, ord("k")):
                    cursor = (cursor - 1) % len(all_items)
                elif key in (curses.KEY_DOWN, ord("j")):
                    cursor = (cursor + 1) % len(all_items)
                elif key in (curses.KEY_ENTER, 10, 13):  # Enter / LF / CR
                    result_holder[0] = cursor
                    return
                elif key in (27, ord("q")):  # 27 = ESC
                    result_holder[0] = None
                    return

        curses.wrapper(_draw)
        flush_stdin()  # drop any keystrokes typed after the menu closed
        # Selecting the appended cancel row counts as a cancel.
        if result_holder[0] is not None and result_holder[0] >= cancel_idx:
            return None
        return result_holder[0]

    except Exception:
        # Any curses failure (no terminfo, weird TERM, etc.) degrades to the
        # plain numbered-prompt fallback.
        all_items = list(items) + [cancel_label]
        cancel_idx = len(items)
        return _numbered_single_fallback(title, all_items, cancel_idx)
|
||||
|
||||
|
||||
def _numbered_single_fallback(
    title: str,
    items: List[str],
    cancel_idx: int,
) -> int | None:
    """Text-based numbered fallback for single-select.

    Prints the items as a 1-based numbered list, reads one choice from
    stdin, and returns the 0-based index — or None on cancel, empty input,
    invalid input, EOF, or Ctrl-C.
    """
    print(f"\n {title}\n")
    for num, label in enumerate(items, start=1):
        print(f" {num}. {label}")
    print()

    try:
        raw = input(f" Choice [1-{len(items)}]: ").strip()
    except (KeyboardInterrupt, EOFError):
        return None

    if not raw:
        return None

    try:
        choice = int(raw) - 1
    except ValueError:
        return None

    if choice == cancel_idx:
        return None
    if 0 <= choice < len(items) and choice < cancel_idx:
        return choice
    return None
|
||||
|
||||
|
||||
def _numbered_fallback(
|
||||
title: str,
|
||||
items: List[str],
|
||||
|
|
|
|||
|
|
@ -119,6 +119,7 @@ def _configured_platforms() -> list[str]:
|
|||
"dingtalk": "DINGTALK_CLIENT_ID",
|
||||
"feishu": "FEISHU_APP_ID",
|
||||
"wecom": "WECOM_BOT_ID",
|
||||
"wecom_callback": "WECOM_CALLBACK_CORP_ID",
|
||||
"weixin": "WEIXIN_ACCOUNT_ID",
|
||||
}
|
||||
return [name for name, env in checks.items() if os.getenv(env)]
|
||||
|
|
|
|||
|
|
@ -157,30 +157,54 @@ def _request_gateway_self_restart(pid: int) -> bool:
|
|||
return True
|
||||
|
||||
|
||||
def find_gateway_pids(exclude_pids: set | None = None) -> list:
|
||||
def find_gateway_pids(exclude_pids: set | None = None, all_profiles: bool = False) -> list:
|
||||
"""Find PIDs of running gateway processes.
|
||||
|
||||
Args:
|
||||
exclude_pids: PIDs to exclude from the result (e.g. service-managed
|
||||
PIDs that should not be killed during a stale-process sweep).
|
||||
all_profiles: When ``True``, return gateway PIDs across **all**
|
||||
profiles (the pre-7923 global behaviour). ``hermes update``
|
||||
needs this because a code update affects every profile.
|
||||
When ``False`` (default), only PIDs belonging to the current
|
||||
Hermes profile are returned.
|
||||
"""
|
||||
pids = []
|
||||
_exclude = exclude_pids or set()
|
||||
pids = [pid for pid in _get_service_pids() if pid not in _exclude]
|
||||
patterns = [
|
||||
"hermes_cli.main gateway",
|
||||
"hermes_cli.main --profile",
|
||||
"hermes_cli.main -p",
|
||||
"hermes_cli/main.py gateway",
|
||||
"hermes_cli/main.py --profile",
|
||||
"hermes_cli/main.py -p",
|
||||
"hermes gateway",
|
||||
"gateway/run.py",
|
||||
]
|
||||
current_home = str(get_hermes_home().resolve())
|
||||
current_profile_arg = _profile_arg(current_home)
|
||||
current_profile_name = current_profile_arg.split()[-1] if current_profile_arg else ""
|
||||
|
||||
def _matches_current_profile(command: str) -> bool:
|
||||
if current_profile_name:
|
||||
return (
|
||||
f"--profile {current_profile_name}" in command
|
||||
or f"-p {current_profile_name}" in command
|
||||
or f"HERMES_HOME={current_home}" in command
|
||||
)
|
||||
|
||||
if "--profile " in command or " -p " in command:
|
||||
return False
|
||||
if "HERMES_HOME=" in command and f"HERMES_HOME={current_home}" not in command:
|
||||
return False
|
||||
return True
|
||||
|
||||
try:
|
||||
if is_windows():
|
||||
# Windows: use wmic to search command lines
|
||||
result = subprocess.run(
|
||||
["wmic", "process", "get", "ProcessId,CommandLine", "/FORMAT:LIST"],
|
||||
capture_output=True, text=True, timeout=10
|
||||
)
|
||||
# Parse WMIC LIST output: blocks of "CommandLine=...\nProcessId=...\n"
|
||||
current_cmd = ""
|
||||
for line in result.stdout.split('\n'):
|
||||
line = line.strip()
|
||||
|
|
@ -188,7 +212,7 @@ def find_gateway_pids(exclude_pids: set | None = None) -> list:
|
|||
current_cmd = line[len("CommandLine="):]
|
||||
elif line.startswith("ProcessId="):
|
||||
pid_str = line[len("ProcessId="):]
|
||||
if any(p in current_cmd for p in patterns):
|
||||
if any(p in current_cmd for p in patterns) and (all_profiles or _matches_current_profile(current_cmd)):
|
||||
try:
|
||||
pid = int(pid_str)
|
||||
if pid != os.getpid() and pid not in pids and pid not in _exclude:
|
||||
|
|
@ -198,41 +222,57 @@ def find_gateway_pids(exclude_pids: set | None = None) -> list:
|
|||
current_cmd = ""
|
||||
else:
|
||||
result = subprocess.run(
|
||||
["ps", "aux"],
|
||||
["ps", "eww", "-ax", "-o", "pid=,command="],
|
||||
capture_output=True,
|
||||
text=True,
|
||||
timeout=10,
|
||||
)
|
||||
for line in result.stdout.split('\n'):
|
||||
# Skip grep and current process
|
||||
if 'grep' in line or str(os.getpid()) in line:
|
||||
stripped = line.strip()
|
||||
if not stripped or 'grep' in stripped:
|
||||
continue
|
||||
for pattern in patterns:
|
||||
if pattern in line:
|
||||
parts = line.split()
|
||||
if len(parts) > 1:
|
||||
try:
|
||||
pid = int(parts[1])
|
||||
if pid not in pids and pid not in _exclude:
|
||||
pids.append(pid)
|
||||
except ValueError:
|
||||
continue
|
||||
break
|
||||
except Exception:
|
||||
|
||||
pid = None
|
||||
command = ""
|
||||
|
||||
parts = stripped.split(None, 1)
|
||||
if len(parts) == 2:
|
||||
try:
|
||||
pid = int(parts[0])
|
||||
command = parts[1]
|
||||
except ValueError:
|
||||
pid = None
|
||||
|
||||
if pid is None:
|
||||
aux_parts = stripped.split()
|
||||
if len(aux_parts) > 10 and aux_parts[1].isdigit():
|
||||
pid = int(aux_parts[1])
|
||||
command = " ".join(aux_parts[10:])
|
||||
|
||||
if pid is None:
|
||||
continue
|
||||
if pid == os.getpid() or pid in pids or pid in _exclude:
|
||||
continue
|
||||
if any(pattern in command for pattern in patterns) and (all_profiles or _matches_current_profile(command)):
|
||||
pids.append(pid)
|
||||
except (OSError, subprocess.TimeoutExpired):
|
||||
pass
|
||||
|
||||
return pids
|
||||
|
||||
|
||||
def kill_gateway_processes(force: bool = False, exclude_pids: set | None = None) -> int:
|
||||
def kill_gateway_processes(force: bool = False, exclude_pids: set | None = None,
|
||||
all_profiles: bool = False) -> int:
|
||||
"""Kill any running gateway processes. Returns count killed.
|
||||
|
||||
Args:
|
||||
force: Use the platform's force-kill mechanism instead of graceful terminate.
|
||||
exclude_pids: PIDs to skip (e.g. service-managed PIDs that were just
|
||||
restarted and should not be killed).
|
||||
all_profiles: When ``True``, kill across all profiles. Passed
|
||||
through to :func:`find_gateway_pids`.
|
||||
"""
|
||||
pids = find_gateway_pids(exclude_pids=exclude_pids)
|
||||
pids = find_gateway_pids(exclude_pids=exclude_pids, all_profiles=all_profiles)
|
||||
killed = 0
|
||||
|
||||
for pid in pids:
|
||||
|
|
@ -633,6 +673,17 @@ def print_systemd_linger_guidance() -> None:
|
|||
print(" If you want the gateway user service to survive logout, run:")
|
||||
print(" sudo loginctl enable-linger $USER")
|
||||
|
||||
def _launchd_user_home() -> Path:
|
||||
"""Return the real macOS user home for launchd artifacts.
|
||||
|
||||
Profile-mode Hermes often sets ``HOME`` to a profile-scoped directory, but
|
||||
launchd user agents still live under the actual account home.
|
||||
"""
|
||||
import pwd
|
||||
|
||||
return Path(pwd.getpwuid(os.getuid()).pw_dir)
|
||||
|
||||
|
||||
def get_launchd_plist_path() -> Path:
|
||||
"""Return the launchd plist path, scoped per profile.
|
||||
|
||||
|
|
@ -641,7 +692,7 @@ def get_launchd_plist_path() -> Path:
|
|||
"""
|
||||
suffix = _profile_suffix()
|
||||
name = f"ai.hermes.gateway-{suffix}" if suffix else "ai.hermes.gateway"
|
||||
return Path.home() / "Library" / "LaunchAgents" / f"{name}.plist"
|
||||
return _launchd_user_home() / "Library" / "LaunchAgents" / f"{name}.plist"
|
||||
|
||||
def _detect_venv_dir() -> Path | None:
|
||||
"""Detect the active virtualenv directory.
|
||||
|
|
@ -839,6 +890,25 @@ def _normalize_service_definition(text: str) -> str:
|
|||
return "\n".join(line.rstrip() for line in text.strip().splitlines())
|
||||
|
||||
|
||||
def _normalize_launchd_plist_for_comparison(text: str) -> str:
|
||||
"""Normalize launchd plist text for staleness checks.
|
||||
|
||||
The generated plist intentionally captures a broad PATH assembled from the
|
||||
invoking shell so user-installed tools remain reachable under launchd.
|
||||
That makes raw text comparison unstable across shells, so ignore the PATH
|
||||
payload when deciding whether the installed plist is stale.
|
||||
"""
|
||||
import re
|
||||
|
||||
normalized = _normalize_service_definition(text)
|
||||
return re.sub(
|
||||
r'(<key>PATH</key>\s*<string>)(.*?)(</string>)',
|
||||
r'\1__HERMES_PATH__\3',
|
||||
normalized,
|
||||
flags=re.S,
|
||||
)
|
||||
|
||||
|
||||
def systemd_unit_is_current(system: bool = False) -> bool:
|
||||
unit_path = get_systemd_unit_path(system=system)
|
||||
if not unit_path.exists():
|
||||
|
|
@ -1220,7 +1290,7 @@ def launchd_plist_is_current() -> bool:
|
|||
|
||||
installed = plist_path.read_text(encoding="utf-8")
|
||||
expected = generate_launchd_plist()
|
||||
return _normalize_service_definition(installed) == _normalize_service_definition(expected)
|
||||
return _normalize_launchd_plist_for_comparison(installed) == _normalize_launchd_plist_for_comparison(expected)
|
||||
|
||||
|
||||
def refresh_launchd_plist_if_needed() -> bool:
|
||||
|
|
@ -1751,6 +1821,37 @@ _PLATFORMS = [
|
|||
"help": "Chat ID for scheduled results and notifications."},
|
||||
],
|
||||
},
|
||||
{
|
||||
"key": "wecom_callback",
|
||||
"label": "WeCom Callback (Self-Built App)",
|
||||
"emoji": "💬",
|
||||
"token_var": "WECOM_CALLBACK_CORP_ID",
|
||||
"setup_instructions": [
|
||||
"1. Go to WeCom Admin Console → Applications → Create Self-Built App",
|
||||
"2. Note the Corp ID (top of admin console) and create a Corp Secret",
|
||||
"3. Under Receive Messages, configure the callback URL to point to your server",
|
||||
"4. Copy the Token and EncodingAESKey from the callback configuration",
|
||||
"5. The adapter runs an HTTP server — ensure the port is reachable from WeCom",
|
||||
"6. Restrict access with WECOM_CALLBACK_ALLOWED_USERS for production use",
|
||||
],
|
||||
"vars": [
|
||||
{"name": "WECOM_CALLBACK_CORP_ID", "prompt": "Corp ID", "password": False,
|
||||
"help": "Your WeCom enterprise Corp ID."},
|
||||
{"name": "WECOM_CALLBACK_CORP_SECRET", "prompt": "Corp Secret", "password": True,
|
||||
"help": "The secret for your self-built application."},
|
||||
{"name": "WECOM_CALLBACK_AGENT_ID", "prompt": "Agent ID", "password": False,
|
||||
"help": "The Agent ID of your self-built application."},
|
||||
{"name": "WECOM_CALLBACK_TOKEN", "prompt": "Callback Token", "password": True,
|
||||
"help": "The Token from your WeCom callback configuration."},
|
||||
{"name": "WECOM_CALLBACK_ENCODING_AES_KEY", "prompt": "Encoding AES Key", "password": True,
|
||||
"help": "The EncodingAESKey from your WeCom callback configuration."},
|
||||
{"name": "WECOM_CALLBACK_PORT", "prompt": "Callback server port (default: 8645)", "password": False,
|
||||
"help": "Port for the HTTP callback server."},
|
||||
{"name": "WECOM_CALLBACK_ALLOWED_USERS", "prompt": "Allowed user IDs (comma-separated, or empty)", "password": False,
|
||||
"is_allowlist": True,
|
||||
"help": "Restrict which WeCom users can interact with the app."},
|
||||
],
|
||||
},
|
||||
{
|
||||
"key": "weixin",
|
||||
"label": "Weixin / WeChat",
|
||||
|
|
@ -1981,6 +2082,36 @@ def _setup_whatsapp():
|
|||
cmd_whatsapp(argparse.Namespace())
|
||||
|
||||
|
||||
def _setup_email():
|
||||
"""Configure Email via the standard platform setup."""
|
||||
email_platform = next(p for p in _PLATFORMS if p["key"] == "email")
|
||||
_setup_standard_platform(email_platform)
|
||||
|
||||
|
||||
def _setup_sms():
|
||||
"""Configure SMS (Twilio) via the standard platform setup."""
|
||||
sms_platform = next(p for p in _PLATFORMS if p["key"] == "sms")
|
||||
_setup_standard_platform(sms_platform)
|
||||
|
||||
|
||||
def _setup_dingtalk():
|
||||
"""Configure DingTalk via the standard platform setup."""
|
||||
dingtalk_platform = next(p for p in _PLATFORMS if p["key"] == "dingtalk")
|
||||
_setup_standard_platform(dingtalk_platform)
|
||||
|
||||
|
||||
def _setup_feishu():
|
||||
"""Configure Feishu / Lark via the standard platform setup."""
|
||||
feishu_platform = next(p for p in _PLATFORMS if p["key"] == "feishu")
|
||||
_setup_standard_platform(feishu_platform)
|
||||
|
||||
|
||||
def _setup_wecom():
|
||||
"""Configure WeCom (Enterprise WeChat) via the standard platform setup."""
|
||||
wecom_platform = next(p for p in _PLATFORMS if p["key"] == "wecom")
|
||||
_setup_standard_platform(wecom_platform)
|
||||
|
||||
|
||||
def _is_service_installed() -> bool:
|
||||
"""Check if the gateway is installed as a system service."""
|
||||
if supports_systemd_services():
|
||||
|
|
@ -2566,7 +2697,7 @@ def gateway_command(args):
|
|||
service_available = True
|
||||
except subprocess.CalledProcessError:
|
||||
pass
|
||||
killed = kill_gateway_processes()
|
||||
killed = kill_gateway_processes(all_profiles=True)
|
||||
total = killed + (1 if service_available else 0)
|
||||
if total:
|
||||
print(f"✓ Stopped {total} gateway process(es) across all profiles")
|
||||
|
|
|
|||
|
|
@ -1,16 +1,18 @@
|
|||
"""``hermes logs`` — view and filter Hermes log files.
|
||||
|
||||
Supports tailing, following, session filtering, level filtering, and
|
||||
relative time ranges. All log files live under ``~/.hermes/logs/``.
|
||||
Supports tailing, following, session filtering, level filtering,
|
||||
component filtering, and relative time ranges. All log files live
|
||||
under ``~/.hermes/logs/``.
|
||||
|
||||
Usage examples::
|
||||
|
||||
hermes logs # last 50 lines of agent.log
|
||||
hermes logs -f # follow agent.log in real time
|
||||
hermes logs errors # last 50 lines of errors.log
|
||||
hermes logs gateway -n 100 # last 100 lines of gateway.log
|
||||
hermes logs gateway -n 100 # last 100 lines of gateway.log
|
||||
hermes logs --level WARNING # only WARNING+ lines
|
||||
hermes logs --session abc123 # filter by session ID substring
|
||||
hermes logs --component tools # only tool-related lines
|
||||
hermes logs --since 1h # lines from the last hour
|
||||
hermes logs --since 30m -f # follow, starting 30 min ago
|
||||
"""
|
||||
|
|
@ -20,7 +22,7 @@ import sys
|
|||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
from typing import Optional, Sequence
|
||||
|
||||
from hermes_constants import get_hermes_home, display_hermes_home
|
||||
|
||||
|
|
@ -38,6 +40,15 @@ _TS_RE = re.compile(r"^(\d{4}-\d{2}-\d{2}\s+\d{2}:\d{2}:\d{2})")
|
|||
# Level extraction — matches " INFO ", " WARNING ", " ERROR ", " DEBUG ", " CRITICAL "
|
||||
_LEVEL_RE = re.compile(r"\s(DEBUG|INFO|WARNING|ERROR|CRITICAL)\s")
|
||||
|
||||
# Logger name extraction — after level and optional session tag, the next
|
||||
# non-space token before ":" is the logger name.
|
||||
# Matches: "INFO gateway.run:" or "INFO [sess_abc] tools.terminal_tool:"
|
||||
_LOGGER_NAME_RE = re.compile(
|
||||
r"\s(?:DEBUG|INFO|WARNING|ERROR|CRITICAL)" # level
|
||||
r"(?:\s+\[.*?\])?" # optional session tag
|
||||
r"\s+(\S+):" # logger name
|
||||
)
|
||||
|
||||
# Level ordering for >= filtering
|
||||
_LEVEL_ORDER = {"DEBUG": 0, "INFO": 1, "WARNING": 2, "ERROR": 3, "CRITICAL": 4}
|
||||
|
||||
|
|
@ -79,12 +90,27 @@ def _extract_level(line: str) -> Optional[str]:
|
|||
return m.group(1) if m else None
|
||||
|
||||
|
||||
def _extract_logger_name(line: str) -> Optional[str]:
|
||||
"""Extract the logger name from a log line."""
|
||||
m = _LOGGER_NAME_RE.search(line)
|
||||
return m.group(1) if m else None
|
||||
|
||||
|
||||
def _line_matches_component(line: str, prefixes: Sequence[str]) -> bool:
|
||||
"""Check if a log line's logger name starts with any of *prefixes*."""
|
||||
name = _extract_logger_name(line)
|
||||
if name is None:
|
||||
return False
|
||||
return name.startswith(tuple(prefixes))
|
||||
|
||||
|
||||
def _matches_filters(
|
||||
line: str,
|
||||
*,
|
||||
min_level: Optional[str] = None,
|
||||
session_filter: Optional[str] = None,
|
||||
since: Optional[datetime] = None,
|
||||
component_prefixes: Optional[Sequence[str]] = None,
|
||||
) -> bool:
|
||||
"""Check if a log line passes all active filters."""
|
||||
if since is not None:
|
||||
|
|
@ -102,6 +128,10 @@ def _matches_filters(
|
|||
if session_filter not in line:
|
||||
return False
|
||||
|
||||
if component_prefixes is not None:
|
||||
if not _line_matches_component(line, component_prefixes):
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
|
||||
|
|
@ -113,6 +143,7 @@ def tail_log(
|
|||
level: Optional[str] = None,
|
||||
session: Optional[str] = None,
|
||||
since: Optional[str] = None,
|
||||
component: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Read and display log lines, optionally following in real time.
|
||||
|
||||
|
|
@ -130,6 +161,8 @@ def tail_log(
|
|||
Session ID substring to filter on.
|
||||
since
|
||||
Relative time string (e.g. ``"1h"``, ``"30m"``).
|
||||
component
|
||||
Component name to filter by (e.g. ``"gateway"``, ``"tools"``).
|
||||
"""
|
||||
filename = LOG_FILES.get(log_name)
|
||||
if filename is None:
|
||||
|
|
@ -155,13 +188,29 @@ def tail_log(
|
|||
print(f"Invalid --level: {level!r}. Use DEBUG, INFO, WARNING, ERROR, or CRITICAL.")
|
||||
sys.exit(1)
|
||||
|
||||
has_filters = min_level is not None or session is not None or since_dt is not None
|
||||
# Resolve component to logger name prefixes
|
||||
component_prefixes = None
|
||||
if component:
|
||||
from hermes_logging import COMPONENT_PREFIXES
|
||||
component_lower = component.lower()
|
||||
if component_lower not in COMPONENT_PREFIXES:
|
||||
available = ", ".join(sorted(COMPONENT_PREFIXES))
|
||||
print(f"Unknown component: {component!r}. Available: {available}")
|
||||
sys.exit(1)
|
||||
component_prefixes = COMPONENT_PREFIXES[component_lower]
|
||||
|
||||
has_filters = (
|
||||
min_level is not None
|
||||
or session is not None
|
||||
or since_dt is not None
|
||||
or component_prefixes is not None
|
||||
)
|
||||
|
||||
# Read and display the tail
|
||||
try:
|
||||
lines = _read_tail(log_path, num_lines, has_filters=has_filters,
|
||||
min_level=min_level, session_filter=session,
|
||||
since=since_dt)
|
||||
since=since_dt, component_prefixes=component_prefixes)
|
||||
except PermissionError:
|
||||
print(f"Permission denied: {log_path}")
|
||||
sys.exit(1)
|
||||
|
|
@ -172,6 +221,8 @@ def tail_log(
|
|||
filter_parts.append(f"level>={min_level}")
|
||||
if session:
|
||||
filter_parts.append(f"session={session}")
|
||||
if component:
|
||||
filter_parts.append(f"component={component}")
|
||||
if since:
|
||||
filter_parts.append(f"since={since}")
|
||||
filter_desc = f" [{', '.join(filter_parts)}]" if filter_parts else ""
|
||||
|
|
@ -190,7 +241,7 @@ def tail_log(
|
|||
# Follow mode — poll for new content
|
||||
try:
|
||||
_follow_log(log_path, min_level=min_level, session_filter=session,
|
||||
since=since_dt)
|
||||
since=since_dt, component_prefixes=component_prefixes)
|
||||
except KeyboardInterrupt:
|
||||
print("\n--- stopped ---")
|
||||
|
||||
|
|
@ -203,6 +254,7 @@ def _read_tail(
|
|||
min_level: Optional[str] = None,
|
||||
session_filter: Optional[str] = None,
|
||||
since: Optional[datetime] = None,
|
||||
component_prefixes: Optional[Sequence[str]] = None,
|
||||
) -> list:
|
||||
"""Read the last *num_lines* matching lines from a log file.
|
||||
|
||||
|
|
@ -215,7 +267,8 @@ def _read_tail(
|
|||
filtered = [
|
||||
l for l in raw_lines
|
||||
if _matches_filters(l, min_level=min_level,
|
||||
session_filter=session_filter, since=since)
|
||||
session_filter=session_filter, since=since,
|
||||
component_prefixes=component_prefixes)
|
||||
]
|
||||
return filtered[-num_lines:]
|
||||
else:
|
||||
|
|
@ -284,6 +337,7 @@ def _follow_log(
|
|||
min_level: Optional[str] = None,
|
||||
session_filter: Optional[str] = None,
|
||||
since: Optional[datetime] = None,
|
||||
component_prefixes: Optional[Sequence[str]] = None,
|
||||
) -> None:
|
||||
"""Poll a log file for new content and print matching lines."""
|
||||
with open(path, "r", encoding="utf-8", errors="replace") as f:
|
||||
|
|
@ -293,7 +347,8 @@ def _follow_log(
|
|||
line = f.readline()
|
||||
if line:
|
||||
if _matches_filters(line, min_level=min_level,
|
||||
session_filter=session_filter, since=since):
|
||||
session_filter=session_filter, since=since,
|
||||
component_prefixes=component_prefixes):
|
||||
print(line, end="")
|
||||
sys.stdout.flush()
|
||||
else:
|
||||
|
|
|
|||
|
|
@ -528,6 +528,113 @@ def _resolve_last_cli_session() -> Optional[str]:
|
|||
return None
|
||||
|
||||
|
||||
def _probe_container(cmd: list, backend: str, via_sudo: bool = False):
|
||||
"""Run a container inspect probe, returning the CompletedProcess.
|
||||
|
||||
Catches TimeoutExpired specifically for a human-readable message;
|
||||
all other exceptions propagate naturally.
|
||||
"""
|
||||
try:
|
||||
return subprocess.run(cmd, capture_output=True, text=True, timeout=15)
|
||||
except subprocess.TimeoutExpired:
|
||||
label = f"sudo {backend}" if via_sudo else backend
|
||||
print(
|
||||
f"Error: timed out waiting for {label} to respond.\n"
|
||||
f"The {backend} daemon may be unresponsive or starting up.",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def _exec_in_container(container_info: dict, cli_args: list):
|
||||
"""Replace the current process with a command inside the managed container.
|
||||
|
||||
Probes whether sudo is needed (rootful containers), then os.execvp
|
||||
into the container. On success the Python process is replaced entirely
|
||||
and the container's exit code becomes the process exit code (OS semantics).
|
||||
On failure, OSError propagates naturally.
|
||||
|
||||
Args:
|
||||
container_info: dict with backend, container_name, exec_user, hermes_bin
|
||||
cli_args: the original CLI arguments (everything after 'hermes')
|
||||
"""
|
||||
import shutil
|
||||
|
||||
backend = container_info["backend"]
|
||||
container_name = container_info["container_name"]
|
||||
exec_user = container_info["exec_user"]
|
||||
hermes_bin = container_info["hermes_bin"]
|
||||
|
||||
runtime = shutil.which(backend)
|
||||
if not runtime:
|
||||
print(f"Error: {backend} not found on PATH. Cannot route to container.",
|
||||
file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
# Rootful containers (NixOS systemd service) are invisible to unprivileged
|
||||
# users — Podman uses per-user namespaces, Docker needs group access.
|
||||
# Probe whether the runtime can see the container; if not, try via sudo.
|
||||
sudo_path = None
|
||||
probe = _probe_container(
|
||||
[runtime, "inspect", "--format", "ok", container_name], backend,
|
||||
)
|
||||
if probe.returncode != 0:
|
||||
sudo_path = shutil.which("sudo")
|
||||
if sudo_path:
|
||||
probe2 = _probe_container(
|
||||
[sudo_path, "-n", runtime, "inspect", "--format", "ok", container_name],
|
||||
backend, via_sudo=True,
|
||||
)
|
||||
if probe2.returncode != 0:
|
||||
print(
|
||||
f"Error: container '{container_name}' not found via {backend}.\n"
|
||||
f"\n"
|
||||
f"The container is likely running as root. Your user cannot see it\n"
|
||||
f"because {backend} uses per-user namespaces. Grant passwordless\n"
|
||||
f"sudo for {backend} — the -n (non-interactive) flag is required\n"
|
||||
f"because a password prompt would hang or break piped commands.\n"
|
||||
f"\n"
|
||||
f"On NixOS:\n"
|
||||
f"\n"
|
||||
f' security.sudo.extraRules = [{{\n'
|
||||
f' users = [ "{os.getenv("USER", "your-user")}" ];\n'
|
||||
f' commands = [{{ command = "{runtime}"; options = [ "NOPASSWD" ]; }}];\n'
|
||||
f' }}];\n'
|
||||
f"\n"
|
||||
f"Or run: sudo hermes {' '.join(cli_args)}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
print(
|
||||
f"Error: container '{container_name}' not found via {backend}.\n"
|
||||
f"The container may be running under root. Try: sudo hermes {' '.join(cli_args)}",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
is_tty = sys.stdin.isatty()
|
||||
tty_flags = ["-it"] if is_tty else ["-i"]
|
||||
|
||||
env_flags = []
|
||||
for var in ("TERM", "COLORTERM", "LANG", "LC_ALL"):
|
||||
val = os.environ.get(var)
|
||||
if val:
|
||||
env_flags.extend(["-e", f"{var}={val}"])
|
||||
|
||||
cmd_prefix = [sudo_path, "-n", runtime] if sudo_path else [runtime]
|
||||
exec_cmd = (
|
||||
cmd_prefix + ["exec"]
|
||||
+ tty_flags
|
||||
+ ["-u", exec_user]
|
||||
+ env_flags
|
||||
+ [container_name, hermes_bin]
|
||||
+ cli_args
|
||||
)
|
||||
|
||||
os.execvp(exec_cmd[0], exec_cmd)
|
||||
|
||||
|
||||
def _resolve_session_by_name_or_id(name_or_id: str) -> Optional[str]:
|
||||
"""Resolve a session name (title) or ID to a session ID.
|
||||
|
||||
|
|
@ -2711,6 +2818,18 @@ def cmd_config(args):
|
|||
config_command(args)
|
||||
|
||||
|
||||
def cmd_backup(args):
|
||||
"""Back up Hermes home directory to a zip file."""
|
||||
from hermes_cli.backup import run_backup
|
||||
run_backup(args)
|
||||
|
||||
|
||||
def cmd_import(args):
|
||||
"""Restore a Hermes backup from a zip file."""
|
||||
from hermes_cli.backup import run_import
|
||||
run_import(args)
|
||||
|
||||
|
||||
def cmd_version(args):
|
||||
"""Show version."""
|
||||
print(f"Hermes Agent v{__version__} ({__release_date__})")
|
||||
|
|
@ -3876,7 +3995,7 @@ def cmd_update(args):
|
|||
# Exclude PIDs that belong to just-restarted services so we don't
|
||||
# immediately kill the process that systemd/launchd just spawned.
|
||||
service_pids = _get_service_pids()
|
||||
manual_pids = find_gateway_pids(exclude_pids=service_pids)
|
||||
manual_pids = find_gateway_pids(exclude_pids=service_pids, all_profiles=True)
|
||||
for pid in manual_pids:
|
||||
try:
|
||||
os.kill(pid, _signal.SIGTERM)
|
||||
|
|
@ -4231,6 +4350,7 @@ def cmd_logs(args):
|
|||
level=getattr(args, "level", None),
|
||||
session=getattr(args, "session", None),
|
||||
since=getattr(args, "since", None),
|
||||
component=getattr(args, "component", None),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -4796,7 +4916,43 @@ For more help on a command:
|
|||
help="Show redacted API key prefixes (first/last 4 chars) instead of just set/not set"
|
||||
)
|
||||
dump_parser.set_defaults(func=cmd_dump)
|
||||
|
||||
|
||||
# =========================================================================
|
||||
# backup command
|
||||
# =========================================================================
|
||||
backup_parser = subparsers.add_parser(
|
||||
"backup",
|
||||
help="Back up Hermes home directory to a zip file",
|
||||
description="Create a zip archive of your entire Hermes configuration, "
|
||||
"skills, sessions, and data (excludes the hermes-agent codebase)"
|
||||
)
|
||||
backup_parser.add_argument(
|
||||
"-o", "--output",
|
||||
help="Output path for the zip file (default: ~/hermes-backup-<timestamp>.zip)"
|
||||
)
|
||||
backup_parser.set_defaults(func=cmd_backup)
|
||||
|
||||
# =========================================================================
|
||||
# import command
|
||||
# =========================================================================
|
||||
import_parser = subparsers.add_parser(
|
||||
"import",
|
||||
help="Restore a Hermes backup from a zip file",
|
||||
description="Extract a previously created Hermes backup into your "
|
||||
"Hermes home directory, restoring configuration, skills, "
|
||||
"sessions, and data"
|
||||
)
|
||||
import_parser.add_argument(
|
||||
"zipfile",
|
||||
help="Path to the backup zip file"
|
||||
)
|
||||
import_parser.add_argument(
|
||||
"--force", "-f",
|
||||
action="store_true",
|
||||
help="Overwrite existing files without confirmation"
|
||||
)
|
||||
import_parser.set_defaults(func=cmd_import)
|
||||
|
||||
# =========================================================================
|
||||
# config command
|
||||
# =========================================================================
|
||||
|
|
@ -5146,6 +5302,8 @@ For more help on a command:
|
|||
mcp_add_p.add_argument("--command", help="Stdio command (e.g. npx)")
|
||||
mcp_add_p.add_argument("--args", nargs="*", default=[], help="Arguments for stdio command")
|
||||
mcp_add_p.add_argument("--auth", choices=["oauth", "header"], help="Auth method")
|
||||
mcp_add_p.add_argument("--preset", help="Known MCP preset name")
|
||||
mcp_add_p.add_argument("--env", nargs="*", default=[], help="Environment variables for stdio servers (KEY=VALUE)")
|
||||
|
||||
mcp_rm_p = mcp_sub.add_parser("remove", aliases=["rm"], help="Remove an MCP server")
|
||||
mcp_rm_p.add_argument("name", help="Server name to remove")
|
||||
|
|
@ -5628,6 +5786,7 @@ Examples:
|
|||
hermes logs gateway -n 100 Show last 100 lines of gateway.log
|
||||
hermes logs --level WARNING Only show WARNING and above
|
||||
hermes logs --session abc123 Filter by session ID
|
||||
hermes logs --component tools Only show tool-related lines
|
||||
hermes logs --since 1h Lines from the last hour
|
||||
hermes logs --since 30m -f Follow, starting from 30 min ago
|
||||
hermes logs list List available log files with sizes
|
||||
|
|
@ -5657,6 +5816,10 @@ Examples:
|
|||
"--since", metavar="TIME",
|
||||
help="Show lines since TIME ago (e.g. 1h, 30m, 2d)",
|
||||
)
|
||||
logs_parser.add_argument(
|
||||
"--component", metavar="NAME",
|
||||
help="Filter by component: gateway, agent, tools, cli, cron",
|
||||
)
|
||||
logs_parser.set_defaults(func=cmd_logs)
|
||||
|
||||
# =========================================================================
|
||||
|
|
@ -5665,9 +5828,22 @@ Examples:
|
|||
# Pre-process argv so unquoted multi-word session names after -c / -r
|
||||
# are merged into a single token before argparse sees them.
|
||||
# e.g. ``hermes -c Pokemon Agent Dev`` → ``hermes -c 'Pokemon Agent Dev'``
|
||||
# ── Container-aware routing ────────────────────────────────────────
|
||||
# When NixOS container mode is active, route ALL subcommands into
|
||||
# the managed container. This MUST run before parse_args() so that
|
||||
# --help, unrecognised flags, and every subcommand are forwarded
|
||||
# transparently instead of being intercepted by argparse on the host.
|
||||
from hermes_cli.config import get_container_exec_info
|
||||
container_info = get_container_exec_info()
|
||||
if container_info:
|
||||
_exec_in_container(container_info, sys.argv[1:])
|
||||
# Unreachable: os.execvp never returns on success (process is replaced)
|
||||
# and raises OSError on failure (which propagates as a traceback).
|
||||
sys.exit(1)
|
||||
|
||||
_processed_argv = _coalesce_session_name_args(sys.argv[1:])
|
||||
args = parser.parse_args(_processed_argv)
|
||||
|
||||
|
||||
# Handle --version flag
|
||||
if args.version:
|
||||
cmd_version(args)
|
||||
|
|
|
|||
|
|
@ -9,7 +9,6 @@ configuration in ~/.hermes/config.yaml under the ``mcp_servers`` key.
|
|||
"""
|
||||
|
||||
import asyncio
|
||||
import getpass
|
||||
import logging
|
||||
import os
|
||||
import re
|
||||
|
|
@ -28,6 +27,11 @@ from hermes_constants import display_hermes_home
|
|||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
_ENV_VAR_NAME_RE = re.compile(r"^[A-Za-z_][A-Za-z0-9_]*$")
|
||||
|
||||
|
||||
_MCP_PRESETS: Dict[str, Dict[str, Any]] = {}
|
||||
|
||||
|
||||
# ─── UI Helpers ───────────────────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -98,6 +102,59 @@ def _env_key_for_server(name: str) -> str:
|
|||
return f"MCP_{name.upper().replace('-', '_')}_API_KEY"
|
||||
|
||||
|
||||
def _parse_env_assignments(raw_env: Optional[List[str]]) -> Dict[str, str]:
|
||||
"""Parse ``KEY=VALUE`` strings from CLI args into an env dict."""
|
||||
parsed: Dict[str, str] = {}
|
||||
for item in raw_env or []:
|
||||
text = str(item or "").strip()
|
||||
if not text:
|
||||
continue
|
||||
if "=" not in text:
|
||||
raise ValueError(f"Invalid --env value '{text}' (expected KEY=VALUE)")
|
||||
key, value = text.split("=", 1)
|
||||
key = key.strip()
|
||||
if not key:
|
||||
raise ValueError(f"Invalid --env value '{text}' (missing variable name)")
|
||||
if not _ENV_VAR_NAME_RE.match(key):
|
||||
raise ValueError(f"Invalid --env variable name '{key}'")
|
||||
parsed[key] = value
|
||||
return parsed
|
||||
|
||||
|
||||
def _apply_mcp_preset(
|
||||
name: str,
|
||||
*,
|
||||
preset_name: Optional[str],
|
||||
url: Optional[str],
|
||||
command: Optional[str],
|
||||
cmd_args: List[str],
|
||||
server_config: Dict[str, Any],
|
||||
) -> tuple[Optional[str], Optional[str], List[str], bool]:
|
||||
"""Apply a known MCP preset when transport details were omitted."""
|
||||
if not preset_name:
|
||||
return url, command, cmd_args, False
|
||||
|
||||
preset = _MCP_PRESETS.get(preset_name)
|
||||
if not preset:
|
||||
raise ValueError(f"Unknown MCP preset: {preset_name}")
|
||||
|
||||
if url or command:
|
||||
return url, command, cmd_args, False
|
||||
|
||||
url = preset.get("url")
|
||||
command = preset.get("command")
|
||||
cmd_args = list(preset.get("args") or [])
|
||||
|
||||
if url:
|
||||
server_config["url"] = url
|
||||
if command:
|
||||
server_config["command"] = command
|
||||
if cmd_args:
|
||||
server_config["args"] = cmd_args
|
||||
|
||||
return url, command, cmd_args, True
|
||||
|
||||
|
||||
# ─── Discovery (temporary connect) ───────────────────────────────────────────
|
||||
|
||||
def _probe_single_server(
|
||||
|
|
@ -166,13 +223,35 @@ def cmd_mcp_add(args):
|
|||
command = getattr(args, "command", None)
|
||||
cmd_args = getattr(args, "args", None) or []
|
||||
auth_type = getattr(args, "auth", None)
|
||||
preset_name = getattr(args, "preset", None)
|
||||
raw_env = getattr(args, "env", None)
|
||||
|
||||
server_config: Dict[str, Any] = {}
|
||||
try:
|
||||
explicit_env = _parse_env_assignments(raw_env)
|
||||
url, command, cmd_args, _preset_applied = _apply_mcp_preset(
|
||||
name,
|
||||
preset_name=preset_name,
|
||||
url=url,
|
||||
command=command,
|
||||
cmd_args=list(cmd_args),
|
||||
server_config=server_config,
|
||||
)
|
||||
except ValueError as exc:
|
||||
_error(str(exc))
|
||||
return
|
||||
|
||||
if url and explicit_env:
|
||||
_error("--env is only supported for stdio MCP servers (--command or stdio presets)")
|
||||
return
|
||||
|
||||
# Validate transport
|
||||
if not url and not command:
|
||||
_error("Must specify --url <endpoint> or --command <cmd>")
|
||||
_error("Must specify --url <endpoint>, --command <cmd>, or --preset <name>")
|
||||
_info("Examples:")
|
||||
_info(' hermes mcp add ink --url "https://mcp.ml.ink/mcp"')
|
||||
_info(' hermes mcp add github --command npx --args @modelcontextprotocol/server-github')
|
||||
_info(' hermes mcp add myserver --preset mypreset')
|
||||
return
|
||||
|
||||
# Check if server already exists
|
||||
|
|
@ -183,13 +262,15 @@ def cmd_mcp_add(args):
|
|||
return
|
||||
|
||||
# Build initial config
|
||||
server_config: Dict[str, Any] = {}
|
||||
if url:
|
||||
server_config["url"] = url
|
||||
else:
|
||||
server_config["command"] = command
|
||||
if cmd_args:
|
||||
server_config["args"] = cmd_args
|
||||
if explicit_env:
|
||||
server_config["env"] = explicit_env
|
||||
|
||||
|
||||
# ── Authentication ────────────────────────────────────────────────
|
||||
|
||||
|
|
@ -627,6 +708,7 @@ def mcp_command(args):
|
|||
_info("hermes mcp serve Run as MCP server")
|
||||
_info("hermes mcp add <name> --url <endpoint> Add an MCP server")
|
||||
_info("hermes mcp add <name> --command <cmd> Add a stdio server")
|
||||
_info("hermes mcp add <name> --preset <preset> Add from a known preset")
|
||||
_info("hermes mcp remove <name> Remove a server")
|
||||
_info("hermes mcp list List servers")
|
||||
_info("hermes mcp test <name> Test connection")
|
||||
|
|
|
|||
|
|
@ -74,13 +74,13 @@ _DOT_TO_HYPHEN_PROVIDERS: frozenset[str] = frozenset({
|
|||
_STRIP_VENDOR_ONLY_PROVIDERS: frozenset[str] = frozenset({
|
||||
"copilot",
|
||||
"copilot-acp",
|
||||
"openai-codex",
|
||||
})
|
||||
|
||||
# Providers whose native naming is authoritative -- pass through unchanged.
|
||||
_AUTHORITATIVE_NATIVE_PROVIDERS: frozenset[str] = frozenset({
|
||||
"gemini",
|
||||
"huggingface",
|
||||
"openai-codex",
|
||||
})
|
||||
|
||||
# Direct providers that accept bare native names but should repair a matching
|
||||
|
|
@ -360,7 +360,11 @@ def normalize_model_for_provider(model_input: str, target_provider: str) -> str:
|
|||
|
||||
# --- Copilot: strip matching provider prefix, keep dots ---
|
||||
if provider in _STRIP_VENDOR_ONLY_PROVIDERS:
|
||||
return _strip_matching_provider_prefix(name, provider)
|
||||
stripped = _strip_matching_provider_prefix(name, provider)
|
||||
if stripped == name and name.startswith("openai/"):
|
||||
# openai-codex maps openai/gpt-5.4 -> gpt-5.4
|
||||
return name.split("/", 1)[1]
|
||||
return stripped
|
||||
|
||||
# --- DeepSeek: map to one of two canonical names ---
|
||||
if provider == "deepseek":
|
||||
|
|
|
|||
|
|
@ -1809,6 +1809,35 @@ def validate_requested_model(
|
|||
"message": message,
|
||||
}
|
||||
|
||||
# OpenAI Codex has its own catalog path; /v1/models probing is not the right validation path.
|
||||
if normalized == "openai-codex":
|
||||
try:
|
||||
codex_models = provider_model_ids("openai-codex")
|
||||
except Exception:
|
||||
codex_models = []
|
||||
if codex_models:
|
||||
if requested_for_lookup in set(codex_models):
|
||||
return {
|
||||
"accepted": True,
|
||||
"persist": True,
|
||||
"recognized": True,
|
||||
"message": None,
|
||||
}
|
||||
suggestions = get_close_matches(requested_for_lookup, codex_models, n=3, cutoff=0.5)
|
||||
suggestion_text = ""
|
||||
if suggestions:
|
||||
suggestion_text = "\n Similar models: " + ", ".join(f"`{s}`" for s in suggestions)
|
||||
return {
|
||||
"accepted": True,
|
||||
"persist": True,
|
||||
"recognized": False,
|
||||
"message": (
|
||||
f"Note: `{requested}` was not found in the OpenAI Codex model listing. "
|
||||
f"It may still work if your account has access to it."
|
||||
f"{suggestion_text}"
|
||||
),
|
||||
}
|
||||
|
||||
# Probe the live API to check if the model actually exists
|
||||
api_models = fetch_api_models(api_key, base_url)
|
||||
|
||||
|
|
|
|||
|
|
@ -33,6 +33,7 @@ PLATFORMS: OrderedDict[str, PlatformInfo] = OrderedDict([
|
|||
("dingtalk", PlatformInfo(label="💬 DingTalk", default_toolset="hermes-dingtalk")),
|
||||
("feishu", PlatformInfo(label="🪽 Feishu", default_toolset="hermes-feishu")),
|
||||
("wecom", PlatformInfo(label="💬 WeCom", default_toolset="hermes-wecom")),
|
||||
("wecom_callback", PlatformInfo(label="💬 WeCom Callback", default_toolset="hermes-wecom-callback")),
|
||||
("weixin", PlatformInfo(label="💬 Weixin", default_toolset="hermes-weixin")),
|
||||
("webhook", PlatformInfo(label="🔗 Webhook", default_toolset="hermes-webhook")),
|
||||
("api_server", PlatformInfo(label="🌐 API Server", default_toolset="hermes-api-server")),
|
||||
|
|
|
|||
|
|
@ -1969,6 +1969,48 @@ def _setup_weixin():
|
|||
_gateway_setup_weixin()
|
||||
|
||||
|
||||
def _setup_signal():
|
||||
"""Configure Signal via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_signal as _gateway_setup_signal
|
||||
_gateway_setup_signal()
|
||||
|
||||
|
||||
def _setup_email():
|
||||
"""Configure Email via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_email as _gateway_setup_email
|
||||
_gateway_setup_email()
|
||||
|
||||
|
||||
def _setup_sms():
|
||||
"""Configure SMS (Twilio) via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_sms as _gateway_setup_sms
|
||||
_gateway_setup_sms()
|
||||
|
||||
|
||||
def _setup_dingtalk():
|
||||
"""Configure DingTalk via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_dingtalk as _gateway_setup_dingtalk
|
||||
_gateway_setup_dingtalk()
|
||||
|
||||
|
||||
def _setup_feishu():
|
||||
"""Configure Feishu / Lark via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_feishu as _gateway_setup_feishu
|
||||
_gateway_setup_feishu()
|
||||
|
||||
|
||||
def _setup_wecom():
|
||||
"""Configure WeCom (Enterprise WeChat) via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_wecom as _gateway_setup_wecom
|
||||
_gateway_setup_wecom()
|
||||
|
||||
|
||||
def _setup_wecom_callback():
|
||||
"""Configure WeCom Callback (self-built app) via gateway setup."""
|
||||
from hermes_cli.gateway import _setup_wecom_callback as _gw_setup
|
||||
_gw_setup()
|
||||
|
||||
|
||||
def _setup_bluebubbles():
|
||||
"""Configure BlueBubbles iMessage gateway."""
|
||||
print_header("BlueBubbles (iMessage)")
|
||||
|
|
@ -2085,9 +2127,16 @@ _GATEWAY_PLATFORMS = [
|
|||
("Telegram", "TELEGRAM_BOT_TOKEN", _setup_telegram),
|
||||
("Discord", "DISCORD_BOT_TOKEN", _setup_discord),
|
||||
("Slack", "SLACK_BOT_TOKEN", _setup_slack),
|
||||
("Signal", "SIGNAL_HTTP_URL", _setup_signal),
|
||||
("Email", "EMAIL_ADDRESS", _setup_email),
|
||||
("SMS (Twilio)", "TWILIO_ACCOUNT_SID", _setup_sms),
|
||||
("Matrix", "MATRIX_ACCESS_TOKEN", _setup_matrix),
|
||||
("Mattermost", "MATTERMOST_TOKEN", _setup_mattermost),
|
||||
("WhatsApp", "WHATSAPP_ENABLED", _setup_whatsapp),
|
||||
("DingTalk", "DINGTALK_CLIENT_ID", _setup_dingtalk),
|
||||
("Feishu / Lark", "FEISHU_APP_ID", _setup_feishu),
|
||||
("WeCom (Enterprise WeChat)", "WECOM_BOT_ID", _setup_wecom),
|
||||
("WeCom Callback (Self-Built App)", "WECOM_CALLBACK_CORP_ID", _setup_wecom_callback),
|
||||
("Weixin (WeChat)", "WEIXIN_ACCOUNT_ID", _setup_weixin),
|
||||
("BlueBubbles (iMessage)", "BLUEBUBBLES_SERVER_URL", _setup_bluebubbles),
|
||||
("Webhooks (GitHub, GitLab, etc.)", "WEBHOOK_ENABLED", _setup_webhooks),
|
||||
|
|
@ -2129,10 +2178,17 @@ def setup_gateway(config: dict):
|
|||
get_env_value("TELEGRAM_BOT_TOKEN")
|
||||
or get_env_value("DISCORD_BOT_TOKEN")
|
||||
or get_env_value("SLACK_BOT_TOKEN")
|
||||
or get_env_value("SIGNAL_HTTP_URL")
|
||||
or get_env_value("EMAIL_ADDRESS")
|
||||
or get_env_value("TWILIO_ACCOUNT_SID")
|
||||
or get_env_value("MATTERMOST_TOKEN")
|
||||
or get_env_value("MATRIX_ACCESS_TOKEN")
|
||||
or get_env_value("MATRIX_PASSWORD")
|
||||
or get_env_value("WHATSAPP_ENABLED")
|
||||
or get_env_value("DINGTALK_CLIENT_ID")
|
||||
or get_env_value("FEISHU_APP_ID")
|
||||
or get_env_value("WECOM_BOT_ID")
|
||||
or get_env_value("WEIXIN_ACCOUNT_ID")
|
||||
or get_env_value("BLUEBUBBLES_SERVER_URL")
|
||||
or get_env_value("WEBHOOK_ENABLED")
|
||||
)
|
||||
|
|
@ -2321,12 +2377,30 @@ def _get_section_config_summary(config: dict, section_key: str) -> Optional[str]
|
|||
platforms.append("Discord")
|
||||
if get_env_value("SLACK_BOT_TOKEN"):
|
||||
platforms.append("Slack")
|
||||
if get_env_value("WHATSAPP_PHONE_NUMBER_ID"):
|
||||
platforms.append("WhatsApp")
|
||||
if get_env_value("SIGNAL_ACCOUNT"):
|
||||
platforms.append("Signal")
|
||||
if get_env_value("EMAIL_ADDRESS"):
|
||||
platforms.append("Email")
|
||||
if get_env_value("TWILIO_ACCOUNT_SID"):
|
||||
platforms.append("SMS")
|
||||
if get_env_value("MATRIX_ACCESS_TOKEN") or get_env_value("MATRIX_PASSWORD"):
|
||||
platforms.append("Matrix")
|
||||
if get_env_value("MATTERMOST_TOKEN"):
|
||||
platforms.append("Mattermost")
|
||||
if get_env_value("WHATSAPP_PHONE_NUMBER_ID"):
|
||||
platforms.append("WhatsApp")
|
||||
if get_env_value("DINGTALK_CLIENT_ID"):
|
||||
platforms.append("DingTalk")
|
||||
if get_env_value("FEISHU_APP_ID"):
|
||||
platforms.append("Feishu")
|
||||
if get_env_value("WECOM_BOT_ID"):
|
||||
platforms.append("WeCom")
|
||||
if get_env_value("WEIXIN_ACCOUNT_ID"):
|
||||
platforms.append("Weixin")
|
||||
if get_env_value("BLUEBUBBLES_SERVER_URL"):
|
||||
platforms.append("BlueBubbles")
|
||||
if get_env_value("WEBHOOK_ENABLED"):
|
||||
platforms.append("Webhooks")
|
||||
if platforms:
|
||||
return ", ".join(platforms)
|
||||
return None # No platforms configured — section must run
|
||||
|
|
|
|||
|
|
@ -302,6 +302,7 @@ def show_status(args):
|
|||
"DingTalk": ("DINGTALK_CLIENT_ID", None),
|
||||
"Feishu": ("FEISHU_APP_ID", "FEISHU_HOME_CHANNEL"),
|
||||
"WeCom": ("WECOM_BOT_ID", "WECOM_HOME_CHANNEL"),
|
||||
"WeCom Callback": ("WECOM_CALLBACK_CORP_ID", None),
|
||||
"Weixin": ("WEIXIN_ACCOUNT_ID", "WEIXIN_HOME_CHANNEL"),
|
||||
"BlueBubbles": ("BLUEBUBBLES_SERVER_URL", "BLUEBUBBLES_HOME_CHANNEL"),
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue