Add container detection utility to hermes_constants

Extract `is_container()` detection logic from scattered locations
(`config.py`, `voice_mode.py`) into a centralized, cached function in
`hermes_constants.py`. This follows the same pattern as `is_wsl()` and
`is_termux()` — checking `/.dockerenv`, `/run/.containerenv`, and cgroup
markers.

Update gateway status detection (`status.py`, `dump.py`) to use the new
utility and handle Docker/Podman differently from systemd-based systems.
Update setup guidance (`setup.py`) to show Docker restart instructions
when running in a container.

Add Dockerfile.test for CI integration testing and spec.md as a Python
module taste guide for contributors.
This commit is contained in:
alt-glitch 2026-04-12 13:08:15 -07:00
parent e89b9d9732
commit 2f2eeffb96
13 changed files with 785 additions and 68 deletions

4
Dockerfile.test Normal file
View file

@ -0,0 +1,4 @@
# CI integration-test image: layer this checkout's CLI sources over the
# published hermes-agent base image so tests exercise the current code.
FROM nousresearch/hermes-agent:latest
COPY hermes_cli/ /opt/hermes/hermes_cli/
COPY hermes_constants.py /opt/hermes/hermes_constants.py
COPY tools/voice_mode.py /opt/hermes/tools/voice_mode.py

View file

@ -148,25 +148,6 @@ def managed_error(action: str = "modify configuration"):
# Container-aware CLI (NixOS container mode)
# =============================================================================
def _is_inside_container() -> bool:
"""Detect if we're already running inside a Docker/Podman container."""
# Standard Docker/Podman indicators
if os.path.exists("/.dockerenv"):
return True
# Podman uses /run/.containerenv
if os.path.exists("/run/.containerenv"):
return True
# Check cgroup for container runtime evidence (works for both Docker & Podman)
try:
with open("/proc/1/cgroup", "r") as f:
cgroup = f.read()
if "docker" in cgroup or "podman" in cgroup or "/lxc/" in cgroup:
return True
except OSError:
pass
return False
def get_container_exec_info() -> Optional[dict]:
"""Read container mode metadata from HERMES_HOME/.container-mode.
@ -181,7 +162,8 @@ def get_container_exec_info() -> Optional[dict]:
if os.environ.get("HERMES_DEV") == "1":
return None
if _is_inside_container():
from hermes_constants import is_container
if is_container():
return None
container_mode_file = get_hermes_home() / ".container-mode"

View file

@ -44,6 +44,16 @@ def _redact(value: str) -> str:
def _gateway_status() -> str:
"""Return a short gateway status string."""
if sys.platform.startswith("linux"):
from hermes_constants import is_container
if is_container():
try:
from hermes_cli.gateway import find_gateway_pids
pids = find_gateway_pids()
if pids:
return f"running (docker, pid {pids[0]})"
return "stopped (docker)"
except Exception:
return "stopped (docker)"
try:
from hermes_cli.gateway import get_service_name
svc = get_service_name()

View file

@ -331,7 +331,7 @@ def is_linux() -> bool:
return sys.platform.startswith('linux')
from hermes_constants import is_termux, is_wsl
from hermes_constants import is_container, is_termux, is_wsl
def _wsl_systemd_operational() -> bool:
@ -353,7 +353,7 @@ def _wsl_systemd_operational() -> bool:
def supports_systemd_services() -> bool:
if not is_linux() or is_termux():
if not is_linux() or is_termux() or is_container():
return False
if shutil.which("systemctl") is None:
return False
@ -485,6 +485,21 @@ def _journalctl_cmd(system: bool = False) -> list[str]:
return ["journalctl"] if system else ["journalctl", "--user"]
def _run_systemctl(args: list[str], *, system: bool = False, **kwargs) -> subprocess.CompletedProcess:
    """Run a systemctl command, raising RuntimeError if systemctl is missing.

    Defense-in-depth: callers are gated by ``supports_systemd_services()``,
    but this ensures any future caller that bypasses the gate still gets a
    clear error instead of a raw ``FileNotFoundError`` traceback.
    """
    cmd = _systemctl_cmd(system) + args
    try:
        return subprocess.run(cmd, **kwargs)
    except FileNotFoundError:
        # Translate the low-level lookup failure into an actionable error.
        raise RuntimeError("systemctl is not available on this system") from None
def _service_scope_label(system: bool = False) -> str:
return "system" if system else "user"
@ -931,7 +946,7 @@ def refresh_systemd_unit_if_needed(system: bool = False) -> bool:
expected_user = _read_systemd_user_from_unit(unit_path) if system else None
unit_path.write_text(generate_systemd_unit(system=system, run_as_user=expected_user), encoding="utf-8")
subprocess.run(_systemctl_cmd(system) + ["daemon-reload"], check=True, timeout=30)
_run_systemctl(["daemon-reload"], system=system, check=True, timeout=30)
print(f"↻ Updated gateway {_service_scope_label(system)} service definition to match the current Hermes install")
return True
@ -1027,7 +1042,7 @@ def systemd_install(force: bool = False, system: bool = False, run_as_user: str
if not systemd_unit_is_current(system=system):
print(f"↻ Repairing outdated {_service_scope_label(system)} systemd service at: {unit_path}")
refresh_systemd_unit_if_needed(system=system)
subprocess.run(_systemctl_cmd(system) + ["enable", get_service_name()], check=True, timeout=30)
_run_systemctl(["enable", get_service_name()], system=system, check=True, timeout=30)
print(f"{_service_scope_label(system).capitalize()} service definition updated")
return
print(f"Service already installed at: {unit_path}")
@ -1038,8 +1053,8 @@ def systemd_install(force: bool = False, system: bool = False, run_as_user: str
print(f"Installing {_service_scope_label(system)} systemd service to: {unit_path}")
unit_path.write_text(generate_systemd_unit(system=system, run_as_user=run_as_user), encoding="utf-8")
subprocess.run(_systemctl_cmd(system) + ["daemon-reload"], check=True, timeout=30)
subprocess.run(_systemctl_cmd(system) + ["enable", get_service_name()], check=True, timeout=30)
_run_systemctl(["daemon-reload"], system=system, check=True, timeout=30)
_run_systemctl(["enable", get_service_name()], system=system, check=True, timeout=30)
print()
print(f"{_service_scope_label(system).capitalize()} service installed and enabled!")
@ -1065,15 +1080,15 @@ def systemd_uninstall(system: bool = False):
if system:
_require_root_for_system_service("uninstall")
subprocess.run(_systemctl_cmd(system) + ["stop", get_service_name()], check=False, timeout=90)
subprocess.run(_systemctl_cmd(system) + ["disable", get_service_name()], check=False, timeout=30)
_run_systemctl(["stop", get_service_name()], system=system, check=False, timeout=90)
_run_systemctl(["disable", get_service_name()], system=system, check=False, timeout=30)
unit_path = get_systemd_unit_path(system=system)
if unit_path.exists():
unit_path.unlink()
print(f"✓ Removed {unit_path}")
subprocess.run(_systemctl_cmd(system) + ["daemon-reload"], check=True, timeout=30)
_run_systemctl(["daemon-reload"], system=system, check=True, timeout=30)
print(f"{_service_scope_label(system).capitalize()} service uninstalled")
@ -1082,7 +1097,7 @@ def systemd_start(system: bool = False):
if system:
_require_root_for_system_service("start")
refresh_systemd_unit_if_needed(system=system)
subprocess.run(_systemctl_cmd(system) + ["start", get_service_name()], check=True, timeout=30)
_run_systemctl(["start", get_service_name()], system=system, check=True, timeout=30)
print(f"{_service_scope_label(system).capitalize()} service started")
@ -1091,7 +1106,7 @@ def systemd_stop(system: bool = False):
system = _select_systemd_scope(system)
if system:
_require_root_for_system_service("stop")
subprocess.run(_systemctl_cmd(system) + ["stop", get_service_name()], check=True, timeout=90)
_run_systemctl(["stop", get_service_name()], system=system, check=True, timeout=90)
print(f"{_service_scope_label(system).capitalize()} service stopped")
@ -1107,7 +1122,7 @@ def systemd_restart(system: bool = False):
if pid is not None and _request_gateway_self_restart(pid):
print(f"{_service_scope_label(system).capitalize()} service restart requested")
return
subprocess.run(_systemctl_cmd(system) + ["reload-or-restart", get_service_name()], check=True, timeout=90)
_run_systemctl(["reload-or-restart", get_service_name()], system=system, check=True, timeout=90)
print(f"{_service_scope_label(system).capitalize()} service restarted")
@ -1131,14 +1146,16 @@ def systemd_status(deep: bool = False, system: bool = False):
print(f" Run: {'sudo ' if system else ''}hermes gateway restart{scope_flag} # auto-refreshes the unit")
print()
subprocess.run(
_systemctl_cmd(system) + ["status", get_service_name(), "--no-pager"],
_run_systemctl(
["status", get_service_name(), "--no-pager"],
system=system,
capture_output=False,
timeout=10,
)
result = subprocess.run(
_systemctl_cmd(system) + ["is-active", get_service_name()],
result = _run_systemctl(
["is-active", get_service_name()],
system=system,
capture_output=True,
text=True,
timeout=10,
@ -2131,24 +2148,24 @@ def _is_service_running() -> bool:
if user_unit_exists:
try:
result = subprocess.run(
_systemctl_cmd(False) + ["is-active", get_service_name()],
capture_output=True, text=True, timeout=10,
result = _run_systemctl(
["is-active", get_service_name()],
system=False, capture_output=True, text=True, timeout=10,
)
if result.stdout.strip() == "active":
return True
except (FileNotFoundError, subprocess.TimeoutExpired):
except (RuntimeError, subprocess.TimeoutExpired):
pass
if system_unit_exists:
try:
result = subprocess.run(
_systemctl_cmd(True) + ["is-active", get_service_name()],
capture_output=True, text=True, timeout=10,
result = _run_systemctl(
["is-active", get_service_name()],
system=True, capture_output=True, text=True, timeout=10,
)
if result.stdout.strip() == "active":
return True
except (FileNotFoundError, subprocess.TimeoutExpired):
except (RuntimeError, subprocess.TimeoutExpired):
pass
return False
@ -2608,6 +2625,15 @@ def gateway_command(args):
print(" tmux new -s hermes 'hermes gateway run' # persistent via tmux")
print(" nohup hermes gateway run > ~/.hermes/logs/gateway.log 2>&1 & # background")
sys.exit(1)
elif is_container():
print("Service installation is not needed inside a Docker container.")
print("The container runtime is your service manager — use Docker restart policies instead:")
print()
print(" docker run --restart unless-stopped ... # auto-restart on crash/reboot")
print(" docker restart <container> # manual restart")
print()
print("To run the gateway: hermes gateway run")
sys.exit(0)
else:
print("Service installation not supported on this platform.")
print("Run manually: hermes gateway run")
@ -2626,10 +2652,17 @@ def gateway_command(args):
systemd_uninstall(system=system)
elif is_macos():
launchd_uninstall()
elif is_container():
print("Service uninstall is not applicable inside a Docker container.")
print("To stop the gateway, stop or remove the container:")
print()
print(" docker stop <container>")
print(" docker rm <container>")
sys.exit(0)
else:
print("Not supported on this platform.")
sys.exit(1)
elif subcmd == "start":
system = getattr(args, 'system', False)
if is_termux():
@ -2650,10 +2683,19 @@ def gateway_command(args):
print()
print("To enable systemd: add systemd=true to /etc/wsl.conf and run 'wsl --shutdown' from PowerShell.")
sys.exit(1)
elif is_container():
print("Service start is not applicable inside a Docker container.")
print("The gateway runs as the container's main process.")
print()
print(" docker start <container> # start a stopped container")
print(" docker restart <container> # restart a running container")
print()
print("Or run the gateway directly: hermes gateway run")
sys.exit(0)
else:
print("Not supported on this platform.")
sys.exit(1)
elif subcmd == "stop":
stop_all = getattr(args, 'all', False)
system = getattr(args, 'system', False)

View file

@ -2303,8 +2303,17 @@ def setup_gateway(config: dict):
print_info(" Or as a boot-time service: sudo hermes gateway install --system")
print_info(" Or run in foreground: hermes gateway")
else:
print_info("Start the gateway to bring your bots online:")
print_info(" hermes gateway # Run in foreground")
from hermes_constants import is_container
if is_container():
print_info("Start the gateway to bring your bots online:")
print_info(" hermes gateway run # Run as container main process")
print_info("")
print_info("For automatic restarts, use a Docker restart policy:")
print_info(" docker run --restart unless-stopped ...")
print_info(" docker restart <container> # Manual restart")
else:
print_info("Start the gateway to bring your bots online:")
print_info(" hermes gateway # Run in foreground")
print_info("" * 50)

View file

@ -346,23 +346,35 @@ def show_status(args):
print(" Note: Android may stop background jobs when Termux is suspended")
elif sys.platform.startswith('linux'):
try:
from hermes_cli.gateway import get_service_name
_gw_svc = get_service_name()
except Exception:
_gw_svc = "hermes-gateway"
try:
result = subprocess.run(
["systemctl", "--user", "is-active", _gw_svc],
capture_output=True,
text=True,
timeout=5
)
is_active = result.stdout.strip() == "active"
except (FileNotFoundError, subprocess.TimeoutExpired):
is_active = False
print(f" Status: {check_mark(is_active)} {'running' if is_active else 'stopped'}")
print(" Manager: systemd (user)")
from hermes_constants import is_container
if is_container():
# Docker/Podman: no systemd — check for running gateway processes
try:
from hermes_cli.gateway import find_gateway_pids
gateway_pids = find_gateway_pids()
is_active = len(gateway_pids) > 0
except Exception:
is_active = False
print(f" Status: {check_mark(is_active)} {'running' if is_active else 'stopped'}")
print(" Manager: docker (foreground)")
else:
try:
from hermes_cli.gateway import get_service_name
_gw_svc = get_service_name()
except Exception:
_gw_svc = "hermes-gateway"
try:
result = subprocess.run(
["systemctl", "--user", "is-active", _gw_svc],
capture_output=True,
text=True,
timeout=5
)
is_active = result.stdout.strip() == "active"
except (FileNotFoundError, subprocess.TimeoutExpired):
is_active = False
print(f" Status: {check_mark(is_active)} {'running' if is_active else 'stopped'}")
print(" Manager: systemd (user)")
elif sys.platform == 'darwin':
from hermes_cli.gateway import get_launchd_label

View file

@ -189,6 +189,37 @@ def is_wsl() -> bool:
return _wsl_detected
_container_detected: bool | None = None
def is_container() -> bool:
"""Return True when running inside a Docker/Podman container.
Checks ``/.dockerenv`` (Docker), ``/run/.containerenv`` (Podman),
and ``/proc/1/cgroup`` for container runtime markers. Result is
cached for the process lifetime. Import-safe no heavy deps.
"""
global _container_detected
if _container_detected is not None:
return _container_detected
if os.path.exists("/.dockerenv"):
_container_detected = True
return True
if os.path.exists("/run/.containerenv"):
_container_detected = True
return True
try:
with open("/proc/1/cgroup", "r") as f:
cgroup = f.read()
if "docker" in cgroup or "podman" in cgroup or "/lxc/" in cgroup:
_container_detected = True
return True
except OSError:
pass
_container_detected = False
return False
# ─── Well-Known Paths ─────────────────────────────────────────────────────────

374
spec.md Normal file
View file

@ -0,0 +1,374 @@
# Python Module Taste Guide
_Opinionated notes on structuring Python projects where many people (and agents) contribute. Not a style guide — a taste document._
---
## 1. The file is the unit of understanding
Every `.py` file should be explainable in one sentence. If you can't say "this file handles X" without using the word "and", split it.
```
# Good: one sentence each
backends/docker.py → "Docker execution backend"
backends/ssh.py → "SSH execution backend"
agent/planner.py → "Step planning and decomposition"
agent/executor.py → "Tool dispatch and result collection"
# Bad: needs "and"
agent/core.py → "Planning and execution and tool dispatch and error handling"
utils/helpers.py → "Retry logic and string formatting and path resolution"
```
A 400-line file with one clear purpose is better than four 100-line files with fuzzy purposes.
---
## 2. Directory = namespace = concept boundary
A directory exists to group files that share a concept AND need to import each other. If the files don't need each other, they don't need a directory — they can be siblings.
```
# This directory earns its existence:
backends/
├── __init__.py # re-exports Backend, DockerBackend, etc.
├── base.py # Protocol/ABC
├── docker.py # imports base
├── ssh.py # imports base
└── local.py # imports base
# This directory shouldn't exist:
utils/
├── __init__.py
├── retry.py # used by backends
├── formatting.py # used by cli
└── paths.py # used by config
# These have nothing to do with each other. Just put them where they're used.
```
**Test:** if you delete the `__init__.py` and the directory, would each file work as a top-level module? If yes, the directory is probably just cosmetic grouping, not a real namespace.
---
## 3. Flat until it hurts (the two-level rule)
Start with at most two levels of nesting under `src/`. Add a third level only when a directory has 7+ files AND they cluster into obvious sub-groups.
```
# Good: two levels
src/hermes_agent/
├── backends/
├── agent/
├── tools/
├── config/
└── cli/
# Premature: three levels when you only have 2 files
src/hermes_agent/
└── backends/
└── docker/
├── __init__.py
├── container.py # only 80 lines
└── image.py # only 60 lines
# Just keep this as backends/docker.py until it's 300+ lines
```
Depth costs cognitive overhead. Every nested directory is a question: "do I look in `docker/` or `docker/container/`?" Flat trees answer questions faster.
---
## 4. `__init__.py` is your public API
Treat `__init__.py` as the **only** file external consumers should import from. Everything else is internal.
```python
# backends/__init__.py
from .base import Backend, ExecResult
from .docker import DockerBackend
from .local import LocalBackend
from .ssh import SSHBackend
__all__ = ["Backend", "ExecResult", "DockerBackend", "LocalBackend", "SSHBackend"]
```
This gives you freedom to refactor internals. You can split `docker.py` into `docker_container.py` + `docker_network.py` without changing any external imports — because everyone imports from `backends`, not from `backends.docker`.
**Rule:** if you see `from hermes_agent.backends.docker import DockerBackend` in the agent code, that's a smell. It should be `from hermes_agent.backends import DockerBackend`.
---
## 5. Dependency arrows flow one way
Draw the import graph. It should be a DAG with clear layers:
```
cli
 ↓
agent
 ↓   ↘
tools  backends
 ↓       ↓
config  config
```
**Hard rules:**
- `config` imports nothing from the project (it's the leaf)
- `backends` never imports from `agent`
- `tools` never imports from `agent`
- `agent` imports from `tools` and `backends`
- `cli` imports from `agent` (and maybe `config`)
If you're tempted to create a circular import, you're missing an interface. Extract the shared type into `config` or a `types.py` at the appropriate level.
```python
# Bad: circular
# agent/executor.py imports backends.docker
# backends/docker.py imports agent.state ← circular!
# Fix: extract the shared type
# types.py (or config/types.py)
@dataclass
class AgentState:
...
# Now both agent and backends can import from types
```
---
## 6. One file owns each type
Every important class/dataclass/Protocol should live in exactly one file, and that file should be obvious from the type name.
```
BackendConfig → config/backends.py (or config.py if config is flat)
DockerBackend → backends/docker.py
AgentLoop → agent/loop.py
ToolRegistry → tools/registry.py
```
Anti-pattern: putting `BackendConfig` in `backends/base.py` because "it's related to backends." No — config lives in config. Backends _use_ config, they don't _define_ config. This keeps the dependency arrows clean.
---
## 7. Protocols over ABCs for external contracts
Use `typing.Protocol` when you want to define "what shape does this thing have" without forcing inheritance. Use ABCs when you want to share implementation.
```python
# Protocol: structural subtyping, no inheritance needed
# Good for: interfaces consumed by other packages
@runtime_checkable
class Backend(Protocol):
async def execute(self, cmd: str) -> ExecResult: ...
async def upload(self, local: Path, remote: str) -> None: ...
# ABC: nominal subtyping, shared implementation
# Good for: when backends share 50+ lines of common logic
class BaseBackend(ABC):
def __init__(self, config: BackendConfig):
self.config = config
self._setup_logging() # shared
@abstractmethod
async def execute(self, cmd: str) -> ExecResult: ...
def _setup_logging(self): # shared implementation
...
```
**Default to Protocol.** Reach for ABC only when you have real shared code, not just shared signatures.
---
## 8. Config is typed, loaded once, passed explicitly
```python
# config.py
from pydantic import BaseModel
class BackendConfig(BaseModel):
type: str = "local"
docker_image: str | None = None
ssh_host: str | None = None
timeout: float = 30.0
class AgentConfig(BaseModel):
model: str = "gpt-4o"
max_steps: int = 50
backend: BackendConfig = BackendConfig()
# Loading happens once, at the edge
def load_config(path: Path) -> AgentConfig:
raw = yaml.safe_load(path.read_text())
return AgentConfig(**raw)
```
**Rules:**
- Config classes import nothing from the project
- Config is loaded in `cli/` or `main()`, never inside library code
- No module-level globals like `CONFIG = load_config()`. Pass it through constructors.
- No `os.environ.get()` scattered through library code. Read env vars in config loading only.
---
## 9. The "where does new code go?" test
Before committing to a structure, simulate these scenarios:
| Scenario | Should be obvious where to add it |
| ---------------------------------------- | ------------------------------------------------------- |
| New execution backend (e.g., Kubernetes) | `backends/kubernetes.py` + register in `__init__.py` |
| New CLI subcommand | `cli/new_command.py` or a function in existing cli file |
| New tool for the agent | `tools/new_tool.py` + register in tool registry |
| New config option | Add field to existing config model in `config.py` |
| Bug fix in SSH execution | `backends/ssh.py`, nowhere else |
| New eval benchmark | `eval/new_benchmark.py` |
If any of these require touching 5+ files or the answer is "I'm not sure," the structure needs work.
---
## 10. Files that earn their existence
Every file in the project should pass one of these tests:
1. **It's the single home for a concept** (e.g., `docker.py` owns DockerBackend)
2. **It's a boundary** (e.g., `__init__.py` defines the public API)
3. **It's an entrypoint** (e.g., `__main__.py`, CLI commands)
4. **It's config/constants** (e.g., `config.py`, `defaults.py`)
Files that don't pass: `helpers.py`, `misc.py`, `common.py`, `base.py` (when it has no ABC/Protocol), `types.py` (when it has 2 types that belong in their respective modules).
---
## 11. Naming that communicates
**Files:** noun or noun_phrase, lowercase_snake. The name should tell you what's _in_ the file, not what it _does_.
```
# Good: tells you what's inside
registry.py # contains ToolRegistry
docker.py # contains DockerBackend
planner.py # contains Planner, PlanStep
# Bad: tells you what it does (vague)
run.py # run what?
process.py # process what?
handle.py # handle what?
```
**Directories:** plural nouns for collections, singular for a single concern.
```
backends/ # plural: collection of backend implementations
config/ # singular: one concern
tools/ # plural: collection of tools
agent/ # singular: one agent system
```
---
## 12. Tests: organize by confidence, not by source
```
tests/
├── unit/ # fast, isolated, mock everything external
│ ├── test_planner.py
│ └── test_config.py
├── integration/ # real backends, real I/O, but controlled
│ ├── test_docker_backend.py
│ └── test_ssh_backend.py
├── e2e/ # full agent runs, slow, CI-only
│ ├── test_ctf_solve.py
│ └── test_migration.py
└── fixtures/ # shared test data
├── sample_config.yaml
└── mock_responses/
```
Don't mirror `src/` 1:1. Test files group by _what you're verifying_, not which source file they exercise. `test_agent_can_recover_from_backend_failure.py` might touch `agent/`, `backends/`, and `config/` — and that's fine.
---
## 13. The import order tells a story
Within a file, imports should read top-down as: stdlib → third-party → project internals, with project internals going from "far away" to "nearby."
```python
# stdlib
import asyncio
from pathlib import Path
# third-party
from pydantic import BaseModel
# project: far away (config is a leaf, used everywhere)
from hermes_agent.config import BackendConfig
# project: nearby (same package)
from .base import Backend, ExecResult
```
This isn't just aesthetics — it makes dependency direction visible at a glance.
---
## 14. When to split a file
Split when ANY of these are true:
- File exceeds ~400 lines AND has 2+ distinct responsibilities
- Two people frequently have merge conflicts in the same file
- You find yourself adding `# --- Section: X ---` comments to navigate
- The file has internal classes/functions that another module wants to import
Do NOT split just because:
- The file is "long" (a 600-line file with one clear purpose is fine)
- You "might need to" someday
- A linter told you to
---
## 15. Module-level code is a liability
Every line that runs at import time is a line that can break `import hermes_agent`.
```python
# Bad: runs at import time
import docker
client = docker.from_env() # crashes if Docker isn't running
# Good: lazy, runs when needed
def get_docker_client():
import docker
return docker.from_env()
# Also good: runs in __init__, not at module level
class DockerBackend:
def __init__(self, config: BackendConfig):
import docker
self._client = docker.from_env()
```
**Rule:** module-level code should only be: imports, type definitions, constants, and function/class definitions. Never side effects.
---
## Reading list
These are worth reading not for rules but for _calibrating your taste_:
- **Hynek Schlawack — [Testing & Packaging](https://hynek.me/articles/testing-packaging/)**: Best single article on src layout and why it matters.
- **Python Packaging Guide — [src layout vs flat layout](https://packaging.python.org/en/latest/discussions/src-layout-vs-flat-layout/)**: The official take.
- **Brandon Rhodes — [The Clean Architecture in Python](https://rhodesmill.org/brandon/talks/#clean-architecture-python)** (PyCon talk): Good for understanding dependency direction without going full enterprise.
- **Cosmicpython — [Architecture Patterns with Python](https://www.cosmicpython.com/)**: Free online book. Chapters 1-4 on repository pattern and dependency inversion are relevant; skip the CQRS/event-sourcing stuff unless you need it.
- **Hatch documentation**: Modern Python project management. Reading how Hatch structures things will passively teach you good layout conventions.
- **Any well-structured open source project**: `httpx`, `pydantic`, `ruff` (Rust but the Python wrapper layout is instructive), `textual`. Read their `src/` trees and `__init__.py` files.
---
_The goal is not elegance. The goal is that a new contributor — human or agent — can go from "I need to change X" to "I know which file to open" in under 10 seconds._

View file

@ -401,6 +401,14 @@ class TestGatewayServiceDetection:
assert gateway_cli.supports_systemd_services() is False
def test_supports_systemd_services_returns_true_when_systemctl_present(self, monkeypatch):
monkeypatch.setattr(gateway_cli, "is_linux", lambda: True)
monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
monkeypatch.setattr(gateway_cli.shutil, "which", lambda name: "/usr/bin/systemctl")
assert gateway_cli.supports_systemd_services() is True
def test_is_service_running_checks_system_scope_when_user_scope_is_inactive(self, monkeypatch):
user_unit = SimpleNamespace(exists=lambda: True)
system_unit = SimpleNamespace(exists=lambda: True)
@ -1025,3 +1033,91 @@ class TestSystemUnitPathRemapping:
# Target user paths should be present
assert "/home/alice" in unit
assert "WorkingDirectory=/home/alice/.hermes/hermes-agent" in unit
class TestDockerAwareGateway:
    """Tests for Docker container awareness in gateway commands."""

    def test_run_systemctl_raises_runtimeerror_when_missing(self, monkeypatch):
        """_run_systemctl raises RuntimeError with container guidance when systemctl is absent."""
        import pytest

        # Simulate a host with no systemctl binary: subprocess.run raises
        # FileNotFoundError, which the wrapper must translate.
        def fake_run(cmd, **kwargs):
            raise FileNotFoundError("systemctl")

        monkeypatch.setattr(gateway_cli.subprocess, "run", fake_run)
        with pytest.raises(RuntimeError, match="systemctl is not available"):
            gateway_cli._run_systemctl(["start", "hermes-gateway"])

    def test_run_systemctl_passes_through_on_success(self, monkeypatch):
        """_run_systemctl delegates to subprocess.run when systemctl exists."""
        calls = []

        def fake_run(cmd, **kwargs):
            calls.append(cmd)
            return SimpleNamespace(returncode=0, stdout="", stderr="")

        monkeypatch.setattr(gateway_cli.subprocess, "run", fake_run)
        result = gateway_cli._run_systemctl(["status", "hermes-gateway"])
        # The wrapper forwards the CompletedProcess and calls subprocess.run
        # exactly once, with the caller-supplied args in the command line.
        assert result.returncode == 0
        assert len(calls) == 1
        assert "status" in calls[0]

    def test_install_in_container_prints_docker_guidance(self, monkeypatch, capsys):
        """'hermes gateway install' inside Docker exits 0 with container guidance."""
        import pytest

        # Force the platform-detection helpers into the "plain Linux
        # container" configuration so gateway_command takes the container branch.
        monkeypatch.setattr(gateway_cli, "is_managed", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
        monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
        args = SimpleNamespace(gateway_command="install", force=False, system=False, run_as_user=None)
        with pytest.raises(SystemExit) as exc_info:
            gateway_cli.gateway_command(args)
        # Container guidance is informational, not an error: exit code 0.
        assert exc_info.value.code == 0
        out = capsys.readouterr().out
        assert "Docker" in out or "docker" in out
        assert "restart" in out.lower()

    def test_uninstall_in_container_prints_docker_guidance(self, monkeypatch, capsys):
        """'hermes gateway uninstall' inside Docker exits 0 with container guidance."""
        import pytest

        monkeypatch.setattr(gateway_cli, "is_managed", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
        monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
        args = SimpleNamespace(gateway_command="uninstall", system=False)
        with pytest.raises(SystemExit) as exc_info:
            gateway_cli.gateway_command(args)
        assert exc_info.value.code == 0
        out = capsys.readouterr().out
        assert "docker" in out.lower()

    def test_start_in_container_prints_docker_guidance(self, monkeypatch, capsys):
        """'hermes gateway start' inside Docker exits 0 with container guidance."""
        import pytest

        monkeypatch.setattr(gateway_cli, "is_termux", lambda: False)
        monkeypatch.setattr(gateway_cli, "supports_systemd_services", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_macos", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_wsl", lambda: False)
        monkeypatch.setattr(gateway_cli, "is_container", lambda: True)
        args = SimpleNamespace(gateway_command="start", system=False)
        with pytest.raises(SystemExit) as exc_info:
            gateway_cli.gateway_command(args)
        assert exc_info.value.code == 0
        out = capsys.readouterr().out
        # Guidance must mention Docker commands and the manual fallback.
        assert "docker" in out.lower()
        assert "hermes gateway run" in out

View file

@ -181,6 +181,48 @@ def test_setup_gateway_skips_service_install_when_systemctl_missing(monkeypatch,
assert "hermes gateway" in out
def test_setup_gateway_in_container_shows_docker_guidance(monkeypatch, capsys):
    """setup_gateway() in a Docker container shows Docker-specific restart instructions."""
    # Minimal env: only Matrix is configured, so setup_gateway reaches the
    # post-configuration summary without prompting for other platforms.
    env = {
        "TELEGRAM_BOT_TOKEN": "",
        "TELEGRAM_HOME_CHANNEL": "",
        "DISCORD_BOT_TOKEN": "",
        "DISCORD_HOME_CHANNEL": "",
        "SLACK_BOT_TOKEN": "",
        "SLACK_HOME_CHANNEL": "",
        "MATRIX_HOMESERVER": "https://matrix.example.com",
        "MATRIX_USER_ID": "@alice:example.com",
        "MATRIX_PASSWORD": "",
        "MATRIX_ACCESS_TOKEN": "token",
        "BLUEBUBBLES_SERVER_URL": "",
        "BLUEBUBBLES_HOME_CHANNEL": "",
        "WHATSAPP_ENABLED": "",
        "WEBHOOK_ENABLED": "",
    }
    monkeypatch.setattr(setup_mod, "get_env_value", lambda key: env.get(key, ""))
    monkeypatch.setattr(setup_mod, "prompt_yes_no", lambda *args, **kwargs: False)
    monkeypatch.setattr("platform.system", lambda: "Linux")
    import hermes_cli.gateway as gateway_mod

    # No systemd, no macOS, no installed/running service: forces the
    # fallback guidance branch that is container-aware.
    monkeypatch.setattr(gateway_mod, "supports_systemd_services", lambda: False)
    monkeypatch.setattr(gateway_mod, "is_macos", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_installed", lambda: False)
    monkeypatch.setattr(gateway_mod, "_is_service_running", lambda: False)
    # Patch is_container at the import location in setup.py
    import hermes_constants
    monkeypatch.setattr(hermes_constants, "is_container", lambda: True)
    setup_mod.setup_gateway({})
    out = capsys.readouterr().out
    assert "Messaging platforms configured!" in out
    assert "docker" in out.lower() or "Docker" in out
    assert "restart" in out.lower()
def test_setup_syncs_custom_provider_removal_from_disk(tmp_path, monkeypatch):
"""Removing the last custom provider in model setup should persist."""
monkeypatch.setenv("HERMES_HOME", str(tmp_path))

View file

@ -6,7 +6,8 @@ from unittest.mock import patch
import pytest
from hermes_constants import get_default_hermes_root
import hermes_constants
from hermes_constants import get_default_hermes_root, is_container
class TestGetDefaultHermesRoot:
@ -60,3 +61,53 @@ class TestGetDefaultHermesRoot:
monkeypatch.setattr(Path, "home", lambda: tmp_path)
monkeypatch.setenv("HERMES_HOME", str(profile))
assert get_default_hermes_root() == docker_root
class TestIsContainer:
    """Tests for is_container() — Docker/Podman detection."""

    def _reset_cache(self, monkeypatch):
        """Clear the memoized detection result so each test probes fresh."""
        monkeypatch.setattr(hermes_constants, "_container_detected", None)

    @staticmethod
    def _route_proc_cgroup(monkeypatch, replacement):
        """Redirect open('/proc/1/cgroup') to *replacement*; pass other paths through."""
        import builtins
        real_open = builtins.open

        def fake_open(path, *args, **kwargs):
            if path == "/proc/1/cgroup":
                return real_open(str(replacement), *args, **kwargs)
            return real_open(path, *args, **kwargs)

        monkeypatch.setattr("builtins.open", fake_open)

    def test_detects_dockerenv(self, monkeypatch, tmp_path):
        """/.dockerenv triggers container detection."""
        self._reset_cache(monkeypatch)
        monkeypatch.setattr(os.path, "exists", lambda p: p == "/.dockerenv")
        assert is_container() is True

    def test_detects_containerenv(self, monkeypatch, tmp_path):
        """/run/.containerenv triggers container detection (Podman)."""
        self._reset_cache(monkeypatch)
        monkeypatch.setattr(os.path, "exists", lambda p: p == "/run/.containerenv")
        assert is_container() is True

    def test_detects_cgroup_docker(self, monkeypatch, tmp_path):
        """/proc/1/cgroup containing 'docker' triggers detection."""
        self._reset_cache(monkeypatch)
        monkeypatch.setattr(os.path, "exists", lambda p: False)
        cgroup_file = tmp_path / "cgroup"
        cgroup_file.write_text("12:memory:/docker/abc123\n")
        self._route_proc_cgroup(monkeypatch, cgroup_file)
        assert is_container() is True

    def test_negative_case(self, monkeypatch, tmp_path):
        """Returns False on a regular Linux host."""
        self._reset_cache(monkeypatch)
        monkeypatch.setattr(os.path, "exists", lambda p: False)
        cgroup_file = tmp_path / "cgroup"
        cgroup_file.write_text("12:memory:/\n")
        self._route_proc_cgroup(monkeypatch, cgroup_file)
        assert is_container() is False

    def test_caches_result(self, monkeypatch):
        """Second call uses cached value without re-probing."""
        monkeypatch.setattr(hermes_constants, "_container_detected", True)
        assert is_container() is True
        # Filesystem probes are bypassed once a result is cached.
        monkeypatch.setattr(os.path, "exists", lambda p: False)
        assert is_container() is True

View file

@ -106,8 +106,9 @@ def detect_audio_environment() -> dict:
if any(os.environ.get(v) for v in ('SSH_CLIENT', 'SSH_TTY', 'SSH_CONNECTION')):
warnings.append("Running over SSH -- no audio devices available")
# Docker detection
if os.path.exists('/.dockerenv'):
# Docker/Podman container detection
from hermes_constants import is_container
if is_container():
warnings.append("Running inside Docker container -- no audio devices")
# WSL detection — PulseAudio bridge makes audio work in WSL.

63
uv.lock generated
View file

@ -165,6 +165,15 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/fb/76/641ae371508676492379f16e2fa48f4e2c11741bd63c48be4b12a6b09cba/aiosignal-1.4.0-py3-none-any.whl", hash = "sha256:053243f8b92b990551949e63930a839ff0cf0b0ebbe0597b0f3fb19e1a0fe82e", size = 7490, upload-time = "2025-07-03T22:54:42.156Z" },
]
[[package]]
name = "aiosqlite"
version = "0.22.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" },
]
[[package]]
name = "altair"
version = "6.0.0"
@ -240,6 +249,54 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" },
]
[[package]]
name = "asyncpg"
version = "0.31.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" },
{ url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" },
{ url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" },
{ url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" },
{ url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" },
{ url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" },
{ url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" },
{ url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" },
{ url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" },
{ url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" },
{ url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" },
{ url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" },
{ url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" },
{ url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" },
{ url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" },
{ url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" },
{ url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" },
{ url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" },
{ url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" },
{ url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" },
{ url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" },
{ url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" },
{ url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" },
{ url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" },
{ url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" },
{ url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" },
{ url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" },
{ url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" },
{ url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" },
{ url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" },
{ url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" },
{ url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" },
{ url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" },
{ url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" },
{ url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" },
{ url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" },
{ url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" },
{ url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" },
{ url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" },
{ url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" },
]
[[package]]
name = "atroposlib"
version = "0.4.0"
@ -1672,6 +1729,8 @@ acp = [
all = [
{ name = "agent-client-protocol" },
{ name = "aiohttp" },
{ name = "aiosqlite", marker = "sys_platform == 'linux'" },
{ name = "asyncpg", marker = "sys_platform == 'linux'" },
{ name = "croniter" },
{ name = "daytona" },
{ name = "debugpy" },
@ -1727,6 +1786,8 @@ honcho = [
{ name = "honcho-ai" },
]
matrix = [
{ name = "aiosqlite" },
{ name = "asyncpg" },
{ name = "markdown" },
{ name = "mautrix", extra = ["encryption"] },
]
@ -1791,7 +1852,9 @@ requires-dist = [
{ name = "aiohttp", marker = "extra == 'homeassistant'", specifier = ">=3.9.0,<4" },
{ name = "aiohttp", marker = "extra == 'messaging'", specifier = ">=3.13.3,<4" },
{ name = "aiohttp", marker = "extra == 'sms'", specifier = ">=3.9.0,<4" },
{ name = "aiosqlite", marker = "extra == 'matrix'", specifier = ">=0.20" },
{ name = "anthropic", specifier = ">=0.39.0,<1" },
{ name = "asyncpg", marker = "extra == 'matrix'", specifier = ">=0.29" },
{ name = "atroposlib", marker = "extra == 'rl'", git = "https://github.com/NousResearch/atropos.git" },
{ name = "croniter", marker = "extra == 'cron'", specifier = ">=6.0.0,<7" },
{ name = "daytona", marker = "extra == 'daytona'", specifier = ">=0.148.0,<1" },