[verified] fix: materialize hindsight profile env during setup

This commit is contained in:
bwjoke 2026-04-22 21:06:26 +08:00 committed by Teknium
parent 127048e643
commit 3e994e38f7
2 changed files with 182 additions and 43 deletions

View file

@ -233,6 +233,72 @@ def _utc_timestamp() -> str:
return datetime.now(timezone.utc).isoformat(timespec="milliseconds").replace("+00:00", "Z") return datetime.now(timezone.utc).isoformat(timespec="milliseconds").replace("+00:00", "Z")
def _embedded_profile_name(config: dict[str, Any]) -> str:
"""Return the Hindsight embedded profile name for this Hermes config."""
profile = config.get("profile", "hermes")
return str(profile or "hermes")
def _load_simple_env(path) -> dict[str, str]:
"""Parse a simple KEY=VALUE env file, ignoring comments and blank lines."""
if not path.exists():
return {}
values: dict[str, str] = {}
for line in path.read_text(encoding="utf-8").splitlines():
if not line or line.startswith("#") or "=" not in line:
continue
key, value = line.split("=", 1)
values[key.strip()] = value.strip()
return values
def _build_embedded_profile_env(config: dict[str, Any], *, llm_api_key: str | None = None) -> dict[str, str]:
"""Build the profile-scoped env file that standalone hindsight-embed consumes."""
current_key = llm_api_key
if current_key is None:
current_key = (
config.get("llmApiKey")
or config.get("llm_api_key")
or os.environ.get("HINDSIGHT_LLM_API_KEY", "")
)
current_provider = config.get("llm_provider", "")
current_model = config.get("llm_model", "")
current_base_url = config.get("llm_base_url") or os.environ.get("HINDSIGHT_API_LLM_BASE_URL", "")
# The embedded daemon expects OpenAI wire format for these providers.
daemon_provider = "openai" if current_provider in ("openai_compatible", "openrouter") else current_provider
env_values = {
"HINDSIGHT_API_LLM_PROVIDER": str(daemon_provider),
"HINDSIGHT_API_LLM_API_KEY": str(current_key or ""),
"HINDSIGHT_API_LLM_MODEL": str(current_model),
"HINDSIGHT_API_LOG_LEVEL": "info",
}
if current_base_url:
env_values["HINDSIGHT_API_LLM_BASE_URL"] = str(current_base_url)
return env_values
def _embedded_profile_env_path(config: dict[str, Any]):
    """Return the per-profile env file path under ``~/.hindsight/profiles``."""
    from pathlib import Path

    profiles_dir = Path.home() / ".hindsight" / "profiles"
    return profiles_dir / (_embedded_profile_name(config) + ".env")
def _materialize_embedded_profile_env(config: dict[str, Any], *, llm_api_key: str | None = None):
    """Write the profile-scoped env file that standalone hindsight-embed uses.

    Creates the parent directory when missing and returns the written path.
    """
    target = _embedded_profile_env_path(config)
    target.parent.mkdir(parents=True, exist_ok=True)
    lines = [
        f"{key}={value}\n"
        for key, value in _build_embedded_profile_env(config, llm_api_key=llm_api_key).items()
    ]
    target.write_text("".join(lines), encoding="utf-8")
    return target
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# MemoryProvider implementation # MemoryProvider implementation
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
@ -367,7 +433,7 @@ class HindsightMemoryProvider(MemoryProvider):
else: else:
deps_to_install = [cloud_dep] deps_to_install = [cloud_dep]
print(f"\n Checking dependencies...") print("\n Checking dependencies...")
uv_path = shutil.which("uv") uv_path = shutil.which("uv")
if not uv_path: if not uv_path:
print(" ⚠ uv not found — install it: curl -LsSf https://astral.sh/uv/install.sh | sh") print(" ⚠ uv not found — install it: curl -LsSf https://astral.sh/uv/install.sh | sh")
@ -378,14 +444,14 @@ class HindsightMemoryProvider(MemoryProvider):
[uv_path, "pip", "install", "--python", sys.executable, "--quiet", "--upgrade"] + deps_to_install, [uv_path, "pip", "install", "--python", sys.executable, "--quiet", "--upgrade"] + deps_to_install,
check=True, timeout=120, capture_output=True, check=True, timeout=120, capture_output=True,
) )
print(f" ✓ Dependencies up to date") print(" ✓ Dependencies up to date")
except Exception as e: except Exception as e:
print(f" ⚠ Install failed: {e}") print(f" ⚠ Install failed: {e}")
print(f" Run manually: uv pip install --python {sys.executable} {' '.join(deps_to_install)}") print(f" Run manually: uv pip install --python {sys.executable} {' '.join(deps_to_install)}")
# Step 3: Mode-specific config # Step 3: Mode-specific config
if mode == "cloud": if mode == "cloud":
print(f"\n Get your API key at https://ui.hindsight.vectorize.io\n") print("\n Get your API key at https://ui.hindsight.vectorize.io\n")
existing_key = os.environ.get("HINDSIGHT_API_KEY", "") existing_key = os.environ.get("HINDSIGHT_API_KEY", "")
if existing_key: if existing_key:
masked = f"...{existing_key[-4:]}" if len(existing_key) > 4 else "set" masked = f"...{existing_key[-4:]}" if len(existing_key) > 4 else "set"
@ -446,7 +512,6 @@ class HindsightMemoryProvider(MemoryProvider):
# Step 4: Save everything # Step 4: Save everything
provider_config["bank_id"] = "hermes" provider_config["bank_id"] = "hermes"
provider_config["recall_budget"] = "mid" provider_config["recall_budget"] = "mid"
bank_id = "hermes"
config["memory"]["provider"] = "hindsight" config["memory"]["provider"] = "hindsight"
save_config(config) save_config(config)
@ -472,10 +537,32 @@ class HindsightMemoryProvider(MemoryProvider):
new_lines.append(f"{k}={v}") new_lines.append(f"{k}={v}")
env_path.write_text("\n".join(new_lines) + "\n") env_path.write_text("\n".join(new_lines) + "\n")
if mode == "local_embedded":
materialized_config = dict(provider_config)
config_path = Path(hermes_home) / "hindsight" / "config.json"
try:
materialized_config = json.loads(config_path.read_text(encoding="utf-8"))
except Exception:
pass
llm_api_key = env_writes.get("HINDSIGHT_LLM_API_KEY", "")
if not llm_api_key:
llm_api_key = _load_simple_env(Path(hermes_home) / ".env").get("HINDSIGHT_LLM_API_KEY", "")
if not llm_api_key:
llm_api_key = _load_simple_env(_embedded_profile_env_path(materialized_config)).get(
"HINDSIGHT_API_LLM_API_KEY",
"",
)
_materialize_embedded_profile_env(
materialized_config,
llm_api_key=llm_api_key or None,
)
print(f"\n ✓ Hindsight memory configured ({mode} mode)") print(f"\n ✓ Hindsight memory configured ({mode} mode)")
if env_writes: if env_writes:
print(f" API keys saved to .env") print(" API keys saved to .env")
print(f"\n Start a new session to activate.\n") print("\n Start a new session to activate.\n")
def get_config_schema(self): def get_config_schema(self):
return [ return [
@ -554,7 +641,9 @@ class HindsightMemoryProvider(MemoryProvider):
if Version(installed) < Version(_MIN_CLIENT_VERSION): if Version(installed) < Version(_MIN_CLIENT_VERSION):
logger.warning("hindsight-client %s is outdated (need >=%s), attempting upgrade...", logger.warning("hindsight-client %s is outdated (need >=%s), attempting upgrade...",
installed, _MIN_CLIENT_VERSION) installed, _MIN_CLIENT_VERSION)
import shutil, subprocess, sys import shutil
import subprocess
import sys
uv_path = shutil.which("uv") uv_path = shutil.which("uv")
if uv_path: if uv_path:
try: try:
@ -675,42 +764,13 @@ class HindsightMemoryProvider(MemoryProvider):
# Update the profile .env to match our current config so # Update the profile .env to match our current config so
# the daemon always starts with the right settings. # the daemon always starts with the right settings.
# If the config changed and the daemon is running, stop it. # If the config changed and the daemon is running, stop it.
from pathlib import Path as _Path profile_env = _embedded_profile_env_path(self._config)
profile_env = _Path.home() / ".hindsight" / "profiles" / f"{profile}.env" expected_env = _build_embedded_profile_env(self._config)
current_key = self._config.get("llm_api_key") or os.environ.get("HINDSIGHT_LLM_API_KEY", "") saved = _load_simple_env(profile_env)
current_provider = self._config.get("llm_provider", "") config_changed = saved != expected_env
current_model = self._config.get("llm_model", "")
current_base_url = self._config.get("llm_base_url") or os.environ.get("HINDSIGHT_API_LLM_BASE_URL", "")
# Map openai_compatible/openrouter → openai for the daemon (OpenAI wire format)
daemon_provider = "openai" if current_provider in ("openai_compatible", "openrouter") else current_provider
# Read saved profile config
saved = {}
if profile_env.exists():
for line in profile_env.read_text().splitlines():
if "=" in line and not line.startswith("#"):
k, v = line.split("=", 1)
saved[k.strip()] = v.strip()
config_changed = (
saved.get("HINDSIGHT_API_LLM_PROVIDER") != daemon_provider or
saved.get("HINDSIGHT_API_LLM_MODEL") != current_model or
saved.get("HINDSIGHT_API_LLM_API_KEY") != current_key or
saved.get("HINDSIGHT_API_LLM_BASE_URL", "") != current_base_url
)
if config_changed: if config_changed:
# Write updated profile .env profile_env = _materialize_embedded_profile_env(self._config)
profile_env.parent.mkdir(parents=True, exist_ok=True)
env_lines = (
f"HINDSIGHT_API_LLM_PROVIDER={daemon_provider}\n"
f"HINDSIGHT_API_LLM_API_KEY={current_key}\n"
f"HINDSIGHT_API_LLM_MODEL={current_model}\n"
f"HINDSIGHT_API_LOG_LEVEL=info\n"
)
if current_base_url:
env_lines += f"HINDSIGHT_API_LLM_BASE_URL={current_base_url}\n"
profile_env.write_text(env_lines)
if client._manager.is_running(profile): if client._manager.is_running(profile):
with open(log_path, "a") as f: with open(log_path, "a") as f:
f.write("\n=== Config changed, restarting daemon ===\n") f.write("\n=== Config changed, restarting daemon ===\n")

View file

@ -7,9 +7,8 @@ turn counting, tags), and schema completeness.
import json import json
import re import re
import threading
from types import SimpleNamespace from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock, patch from unittest.mock import AsyncMock, MagicMock
import pytest import pytest
@ -251,6 +250,86 @@ class TestConfig:
assert cfg["banks"]["hermes"]["budget"] == "high" assert cfg["banks"]["hermes"]["budget"] == "high"
class TestPostSetup:
    """post_setup() in local_embedded mode must materialize the profile env file."""

    @staticmethod
    def _make_homes(tmp_path, monkeypatch):
        # Isolate both the Hermes home and the user home under tmp_path so
        # ~/.hindsight writes never touch the real home directory.
        hermes_home = tmp_path / "hermes-home"
        user_home = tmp_path / "user-home"
        user_home.mkdir()
        monkeypatch.setenv("HOME", str(user_home))
        return hermes_home, user_home

    @staticmethod
    def _stub_interactive_setup(monkeypatch, *, api_key):
        # Drive the interactive wizard: selection 1 = local_embedded mode,
        # selection 0 = openai provider; stub out all terminal I/O.
        choices = iter([1, 0])
        monkeypatch.setattr(
            "hermes_cli.memory_setup._curses_select",
            lambda *args, **kwargs: next(choices),
        )
        monkeypatch.setattr("shutil.which", lambda name: None)
        monkeypatch.setattr("builtins.input", lambda prompt="": "")
        monkeypatch.setattr("sys.stdin.isatty", lambda: True)
        monkeypatch.setattr("getpass.getpass", lambda prompt="": api_key)

    def test_local_embedded_setup_materializes_profile_env(self, tmp_path, monkeypatch):
        hermes_home, user_home = self._make_homes(tmp_path, monkeypatch)
        self._stub_interactive_setup(monkeypatch, api_key="sk-local-test")
        saved_configs = []
        monkeypatch.setattr("hermes_cli.config.save_config", lambda cfg: saved_configs.append(cfg.copy()))

        HindsightMemoryProvider().post_setup(str(hermes_home), {"memory": {}})

        assert saved_configs[-1]["memory"]["provider"] == "hindsight"
        assert (hermes_home / ".env").read_text() == "HINDSIGHT_LLM_API_KEY=sk-local-test\n"
        profile_env = user_home / ".hindsight" / "profiles" / "hermes.env"
        assert profile_env.exists()
        assert profile_env.read_text() == (
            "HINDSIGHT_API_LLM_PROVIDER=openai\n"
            "HINDSIGHT_API_LLM_API_KEY=sk-local-test\n"
            "HINDSIGHT_API_LLM_MODEL=gpt-4o-mini\n"
            "HINDSIGHT_API_LOG_LEVEL=info\n"
        )

    def test_local_embedded_setup_respects_existing_profile_name(self, tmp_path, monkeypatch):
        hermes_home, user_home = self._make_homes(tmp_path, monkeypatch)
        self._stub_interactive_setup(monkeypatch, api_key="sk-local-test")
        monkeypatch.setattr("hermes_cli.config.save_config", lambda cfg: None)

        provider = HindsightMemoryProvider()
        provider.save_config({"profile": "coder"}, str(hermes_home))
        provider.post_setup(str(hermes_home), {"memory": {}})

        profiles = user_home / ".hindsight" / "profiles"
        # The previously saved profile name wins over the default "hermes".
        assert (profiles / "coder.env").exists()
        assert not (profiles / "hermes.env").exists()

    def test_local_embedded_setup_preserves_existing_key_when_input_left_blank(self, tmp_path, monkeypatch):
        hermes_home, user_home = self._make_homes(tmp_path, monkeypatch)
        self._stub_interactive_setup(monkeypatch, api_key="")
        monkeypatch.setattr("hermes_cli.config.save_config", lambda cfg: None)
        env_path = hermes_home / ".env"
        env_path.parent.mkdir(parents=True, exist_ok=True)
        env_path.write_text("HINDSIGHT_LLM_API_KEY=existing-key\n")

        HindsightMemoryProvider().post_setup(str(hermes_home), {"memory": {}})

        # Blank getpass input must fall back to the key already in .env.
        profile_env = user_home / ".hindsight" / "profiles" / "hermes.env"
        assert profile_env.exists()
        assert "HINDSIGHT_API_LLM_API_KEY=existing-key\n" in profile_env.read_text()
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------
# Tool handler tests # Tool handler tests
# --------------------------------------------------------------------------- # ---------------------------------------------------------------------------