mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-04-25 00:51:20 +00:00
feat: add docker_env config for explicit container environment variables (#4738)
Add docker_env option to terminal config — a dict of key-value pairs that
get set inside Docker containers via -e flags at both container creation
(docker run) and per-command execution (docker exec) time.
This complements docker_forward_env (which reads values dynamically from
the host process environment). docker_env is useful when Hermes runs as a
systemd service without access to the user's shell environment — e.g.
setting SSH_AUTH_SOCK or GNUPGHOME to known stable paths for SSH/GPG
agent socket forwarding.
Precedence: docker_env provides baseline values; docker_forward_env
overrides for the same key.
Config example:
terminal:
docker_env:
SSH_AUTH_SOCK: /run/user/1000/ssh-agent.sock
GNUPGHOME: /root/.gnupg
docker_volumes:
- /run/user/1000/ssh-agent.sock:/run/user/1000/ssh-agent.sock
- /run/user/1000/gnupg/S.gpg-agent:/root/.gnupg/S.gpg-agent
This commit is contained in:
parent
78ec8b017f
commit
43d3efd5c8
4 changed files with 175 additions and 5 deletions
|
|
@ -222,6 +222,12 @@ DEFAULT_CONFIG = {
|
|||
"env_passthrough": [],
|
||||
"docker_image": "nikolaik/python-nodejs:python3.11-nodejs20",
|
||||
"docker_forward_env": [],
|
||||
# Explicit environment variables to set inside Docker containers.
|
||||
# Unlike docker_forward_env (which reads values from the host process),
|
||||
# docker_env lets you specify exact key-value pairs — useful when Hermes
|
||||
# runs as a systemd service without access to the user's shell environment.
|
||||
# Example: {"SSH_AUTH_SOCK": "/run/user/1000/ssh-agent.sock"}
|
||||
"docker_env": {},
|
||||
"singularity_image": "docker://nikolaik/python-nodejs:python3.11-nodejs20",
|
||||
"modal_image": "nikolaik/python-nodejs:python3.11-nodejs20",
|
||||
"daytona_image": "nikolaik/python-nodejs:python3.11-nodejs20",
|
||||
|
|
|
|||
|
|
@ -44,6 +44,7 @@ def _make_dummy_env(**kwargs):
|
|||
network=kwargs.get("network", True),
|
||||
host_cwd=kwargs.get("host_cwd"),
|
||||
auto_mount_cwd=kwargs.get("auto_mount_cwd", False),
|
||||
env=kwargs.get("env"),
|
||||
)
|
||||
|
||||
|
||||
|
|
@ -239,6 +240,7 @@ def _make_execute_only_env(forward_env=None):
|
|||
env.cwd = "/root"
|
||||
env.timeout = 60
|
||||
env._forward_env = forward_env or []
|
||||
env._env = {}
|
||||
env._prepare_command = lambda command: (command, None)
|
||||
env._timeout_result = lambda timeout: {"output": f"timed out after {timeout}", "returncode": 124}
|
||||
env._container_id = "test-container"
|
||||
|
|
@ -280,3 +282,120 @@ def test_execute_prefers_shell_env_over_hermes_dotenv(monkeypatch):
|
|||
|
||||
assert "GITHUB_TOKEN=value_from_shell" in popen_calls[0]
|
||||
assert "GITHUB_TOKEN=value_from_dotenv" not in popen_calls[0]
|
||||
|
||||
|
||||
# ── docker_env tests ──────────────────────────────────────────────
|
||||
|
||||
|
||||
def test_docker_env_appears_in_run_command(monkeypatch):
    """Explicit docker_env values should be passed via -e at docker run time."""
    monkeypatch.setattr(docker_env, "find_docker", lambda: "/usr/bin/docker")
    recorded = _mock_subprocess_run(monkeypatch)

    _make_dummy_env(env={"SSH_AUTH_SOCK": "/run/user/1000/ssh-agent.sock", "GNUPGHOME": "/root/.gnupg"})

    # Pick out every recorded subprocess call whose argv looks like `docker run ...`.
    matching = []
    for call in recorded:
        argv = call[0]
        if isinstance(argv, list) and len(argv) >= 2 and argv[1] == "run":
            matching.append(call)
    assert matching, "docker run should have been called"

    joined = " ".join(matching[0][0])
    assert "SSH_AUTH_SOCK=/run/user/1000/ssh-agent.sock" in joined
    assert "GNUPGHOME=/root/.gnupg" in joined
|
||||
|
||||
|
||||
def test_docker_env_appears_in_exec_command(monkeypatch):
    """Explicit docker_env values should also be passed via -e at docker exec time."""
    env = _make_execute_only_env()
    env._env = {"MY_VAR": "my_value"}
    seen_commands = []

    # Spy on Popen so we can inspect the exact argv built for docker exec.
    def _spy_popen(cmd, **kwargs):
        seen_commands.append(cmd)
        return _FakePopen(cmd, **kwargs)

    monkeypatch.setattr(docker_env.subprocess, "Popen", _spy_popen)

    env.execute("echo hi")

    assert seen_commands, "Popen should have been called"
    assert "MY_VAR=my_value" in seen_commands[0]
|
||||
|
||||
|
||||
def test_forward_env_overrides_docker_env(monkeypatch):
    """docker_forward_env should override docker_env for the same key."""
    env = _make_execute_only_env(forward_env=["MY_KEY"])
    env._env = {"MY_KEY": "static_value"}
    captured = []

    # Record each argv handed to Popen while still returning a usable fake.
    def _spy_popen(cmd, **kwargs):
        captured.append(cmd)
        return _FakePopen(cmd, **kwargs)

    # Host process carries the dynamic value; the static docker_env value
    # for the same key must lose.
    monkeypatch.setenv("MY_KEY", "dynamic_value")
    monkeypatch.setattr(docker_env, "_load_hermes_env_vars", lambda: {})
    monkeypatch.setattr(docker_env.subprocess, "Popen", _spy_popen)

    env.execute("echo hi")

    rendered = " ".join(captured[0])
    assert "MY_KEY=dynamic_value" in rendered
    assert "MY_KEY=static_value" not in rendered
|
||||
|
||||
|
||||
def test_docker_env_and_forward_env_merge(monkeypatch):
    """docker_env and docker_forward_env with different keys should both appear."""
    env = _make_execute_only_env(forward_env=["TOKEN"])
    env._env = {"SSH_AUTH_SOCK": "/run/user/1000/agent.sock"}
    captured = []

    # Record each argv handed to Popen while still returning a usable fake.
    def _spy_popen(cmd, **kwargs):
        captured.append(cmd)
        return _FakePopen(cmd, **kwargs)

    monkeypatch.setenv("TOKEN", "secret123")
    monkeypatch.setattr(docker_env, "_load_hermes_env_vars", lambda: {})
    monkeypatch.setattr(docker_env.subprocess, "Popen", _spy_popen)

    env.execute("echo hi")

    # Distinct keys should not shadow one another — both must be forwarded.
    rendered = " ".join(captured[0])
    assert "SSH_AUTH_SOCK=/run/user/1000/agent.sock" in rendered
    assert "TOKEN=secret123" in rendered
|
||||
|
||||
|
||||
def test_normalize_env_dict_filters_invalid_keys():
    """_normalize_env_dict should reject invalid variable names."""
    candidates = {
        "VALID_KEY": "ok",
        "123bad": "rejected",
        "": "rejected",
        "also valid": "rejected",  # spaces invalid
        "GOOD": "ok",
    }
    assert docker_env._normalize_env_dict(candidates) == {"VALID_KEY": "ok", "GOOD": "ok"}
|
||||
|
||||
|
||||
def test_normalize_env_dict_coerces_scalars():
    """_normalize_env_dict should coerce int/float/bool to str."""
    normalized = docker_env._normalize_env_dict({"PORT": 8080, "DEBUG": True, "RATIO": 0.5})
    assert normalized == {"PORT": "8080", "DEBUG": "True", "RATIO": "0.5"}
|
||||
|
||||
|
||||
def test_normalize_env_dict_rejects_non_dict():
    """_normalize_env_dict should return empty dict for non-dict input."""
    for bad_input in ("not a dict", None, []):
        assert docker_env._normalize_env_dict(bad_input) == {}
|
||||
|
||||
|
||||
def test_normalize_env_dict_rejects_complex_values():
    """_normalize_env_dict should reject list/dict values."""
    mixed = {
        "GOOD": "string",
        "BAD_LIST": [1, 2, 3],
        "BAD_DICT": {"nested": True},
    }
    assert docker_env._normalize_env_dict(mixed) == {"GOOD": "string"}
|
||||
|
|
|
|||
|
|
@ -60,6 +60,36 @@ def _normalize_forward_env_names(forward_env: list[str] | None) -> list[str]:
|
|||
return normalized
|
||||
|
||||
|
||||
def _normalize_env_dict(env: dict | None) -> dict[str, str]:
    """Validate and normalize a docker_env dict to {str: str}.

    Entries whose key is not a valid environment-variable name, or whose
    value is neither a string nor a simple scalar (int/float/bool), are
    dropped with a warning instead of failing container setup.
    """
    if not env:
        # Covers None as well as any empty/falsy container.
        return {}
    if not isinstance(env, dict):
        logger.warning("docker_env is not a dict: %r", env)
        return {}

    result: dict[str, str] = {}
    for raw_key, raw_value in env.items():
        key_ok = isinstance(raw_key, str) and _ENV_VAR_NAME_RE.match(raw_key.strip())
        if not key_ok:
            logger.warning("Ignoring invalid docker_env key: %r", raw_key)
            continue
        name = raw_key.strip()
        if isinstance(raw_value, str):
            result[name] = raw_value
        elif isinstance(raw_value, (int, float, bool)):
            # Coerce simple scalar types (int, bool, float) to string;
            # reject complex types.
            result[name] = str(raw_value)
        else:
            logger.warning("Ignoring non-string docker_env value for %r: %r", name, raw_value)

    return result
|
||||
|
||||
|
||||
def _load_hermes_env_vars() -> dict[str, str]:
|
||||
"""Load ~/.hermes/.env values without failing Docker command execution."""
|
||||
try:
|
||||
|
|
@ -210,6 +240,7 @@ class DockerEnvironment(BaseEnvironment):
|
|||
task_id: str = "default",
|
||||
volumes: list = None,
|
||||
forward_env: list[str] | None = None,
|
||||
env: dict | None = None,
|
||||
network: bool = True,
|
||||
host_cwd: str = None,
|
||||
auto_mount_cwd: bool = False,
|
||||
|
|
@ -221,6 +252,7 @@ class DockerEnvironment(BaseEnvironment):
|
|||
self._persistent = persistent_filesystem
|
||||
self._task_id = task_id
|
||||
self._forward_env = _normalize_forward_env_names(forward_env)
|
||||
self._env = _normalize_env_dict(env)
|
||||
self._container_id: Optional[str] = None
|
||||
logger.info(f"DockerEnvironment volumes: {volumes}")
|
||||
# Ensure volumes is a list (config.yaml could be malformed)
|
||||
|
|
@ -362,8 +394,14 @@ class DockerEnvironment(BaseEnvironment):
|
|||
except Exception as e:
|
||||
logger.debug("Docker: could not load credential file mounts: %s", e)
|
||||
|
||||
# Explicit environment variables (docker_env config) — set at container
|
||||
# creation so they're available to all processes (including entrypoint).
|
||||
env_args = []
|
||||
for key in sorted(self._env):
|
||||
env_args.extend(["-e", f"{key}={self._env[key]}"])
|
||||
|
||||
logger.info(f"Docker volume_args: {volume_args}")
|
||||
all_run_args = list(_SECURITY_ARGS) + writable_args + resource_args + volume_args
|
||||
all_run_args = list(_SECURITY_ARGS) + writable_args + resource_args + volume_args + env_args
|
||||
logger.info(f"Docker run_args: {all_run_args}")
|
||||
|
||||
# Resolve the docker executable once so it works even when
|
||||
|
|
@ -456,9 +494,11 @@ class DockerEnvironment(BaseEnvironment):
|
|||
if effective_stdin is not None:
|
||||
cmd.append("-i")
|
||||
cmd.extend(["-w", work_dir])
|
||||
# Combine explicit docker_forward_env with skill-declared env_passthrough
|
||||
# vars so skills that declare required_environment_variables (e.g. Notion)
|
||||
# have their keys forwarded into the container automatically.
|
||||
# Build the per-exec environment: start with explicit docker_env values
|
||||
# (static config), then overlay docker_forward_env / skill env_passthrough
|
||||
# (dynamic from host process). Forward values take precedence.
|
||||
exec_env: dict[str, str] = dict(self._env)
|
||||
|
||||
forward_keys = set(self._forward_env)
|
||||
try:
|
||||
from tools.env_passthrough import get_all_passthrough
|
||||
|
|
@ -471,7 +511,10 @@ class DockerEnvironment(BaseEnvironment):
|
|||
if value is None:
|
||||
value = hermes_env.get(key)
|
||||
if value is not None:
|
||||
cmd.extend(["-e", f"{key}={value}"])
|
||||
exec_env[key] = value
|
||||
|
||||
for key in sorted(exec_env):
|
||||
cmd.extend(["-e", f"{key}={exec_env[key]}"])
|
||||
cmd.extend([self._container_id, "bash", "-lc", exec_command])
|
||||
|
||||
try:
|
||||
|
|
|
|||
|
|
@ -583,6 +583,7 @@ def _create_environment(env_type: str, image: str, cwd: str, timeout: int,
|
|||
persistent = cc.get("container_persistent", True)
|
||||
volumes = cc.get("docker_volumes", [])
|
||||
docker_forward_env = cc.get("docker_forward_env", [])
|
||||
docker_env = cc.get("docker_env", {})
|
||||
|
||||
if env_type == "local":
|
||||
lc = local_config or {}
|
||||
|
|
@ -598,6 +599,7 @@ def _create_environment(env_type: str, image: str, cwd: str, timeout: int,
|
|||
host_cwd=host_cwd,
|
||||
auto_mount_cwd=cc.get("docker_mount_cwd_to_workspace", False),
|
||||
forward_env=docker_forward_env,
|
||||
env=docker_env,
|
||||
)
|
||||
|
||||
elif env_type == "singularity":
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue