feat(honcho): context injection overhaul, 5-tool surface, cost safety, session isolation

Context Injection Overhaul:
- Base layer: peer.context() (representation + card) cached with 5-minute TTL
- Dialectic supplement: cadence-gated, cached until next refresh
- Trivial prompt skip: short inputs/slash commands skip injection
- New peer guard: dialectic skipped at session start when peer has no context
- Targeted warm prompt for better dialectic quality

Tool Surface (5 bidirectional tools):
- honcho_profile: read or update peer card
- honcho_search: semantic search over context
- honcho_context: full session context (summary, representation, card, messages)
- honcho_reasoning: synthesized answer, reasoning_level param
- honcho_conclude: create or delete conclusions (PII removal)

Cost Safety:
- dialectic_cadence defaults to 3 (~66% fewer LLM calls)
- context_tokens defaults to uncapped (cap opt-in via config/wizard)
- on_turn_start hook wired up (fixes broken cadence/injection gating)

Correctness:
- Explicit target= on peer context/card fetches (fixes identity blur)
- honcho_search perspective fix under directional observation
- Timeout config plumbing
- peerName precedence over gateway user_id
- skip_memory on temp agents (orphan session prevention)
- gateway_session_key for stable per-chat session continuity
- initOnSessionStart for eager tools-mode init
- get_session_context fallback respects peer param
- mid -> medium in reasoning level validation

ABC changes (minimal, honcho-only):
- run_agent.py: gateway_session_key param + memory provider wiring (+5 lines)
- gateway/run.py: skip_memory on 2 temp agents, gateway_session_key on main agent (+3 lines)
- agent/memory_manager.py: sanitize regex for context tag variants (+9 lines)
This commit is contained in:
Erosika 2026-04-14 18:07:19 -04:00
parent 95d11dfd8e
commit 11b4c9ecf9
16 changed files with 1283 additions and 331 deletions

View file

@ -486,36 +486,9 @@ class HonchoSessionManager:
_REASONING_LEVELS = ("minimal", "low", "medium", "high", "max")
def _dynamic_reasoning_level(self, query: str) -> str:
"""
Pick a reasoning level for a dialectic query.
When dialecticDynamic is true (default), auto-bumps based on query
length so Honcho applies more inference where it matters:
< 120 chars -> configured default (typically "low")
120-400 chars -> +1 level above default (cap at "high")
> 400 chars -> +2 levels above default (cap at "high")
"max" is never selected automatically -- reserve it for explicit config.
When dialecticDynamic is false, always returns the configured level.
"""
if not self._dialectic_dynamic:
return self._dialectic_reasoning_level
levels = self._REASONING_LEVELS
default_idx = levels.index(self._dialectic_reasoning_level) if self._dialectic_reasoning_level in levels else 1
n = len(query)
if n < 120:
bump = 0
elif n < 400:
bump = 1
else:
bump = 2
# Cap at "high" (index 3) for auto-selection
idx = min(default_idx + bump, 3)
return levels[idx]
def _default_reasoning_level(self) -> str:
"""Return the configured default reasoning level."""
return self._dialectic_reasoning_level
def dialectic_query(
self, session_key: str, query: str,
@ -532,8 +505,9 @@ class HonchoSessionManager:
Args:
session_key: The session key to query against.
query: Natural language question.
reasoning_level: Override the config default. If None, uses
_dynamic_reasoning_level(query).
reasoning_level: Override the configured default (dialecticReasoningLevel).
Only honored when dialecticDynamic is true.
If None or dialecticDynamic is false, uses the configured default.
peer: Which peer to query — "user" (default) or "ai".
Returns:
@ -543,29 +517,34 @@ class HonchoSessionManager:
if not session:
return ""
target_peer_id = self._resolve_peer_id(session, peer)
if target_peer_id is None:
return ""
# Guard: truncate query to Honcho's dialectic input limit
if len(query) > self._dialectic_max_input_chars:
query = query[:self._dialectic_max_input_chars].rsplit(" ", 1)[0]
level = reasoning_level or self._dynamic_reasoning_level(query)
if self._dialectic_dynamic and reasoning_level:
level = reasoning_level
else:
level = self._default_reasoning_level()
try:
if self._ai_observe_others:
# AI peer can observe user — use cross-observation routing
if peer == "ai":
ai_peer_obj = self._get_or_create_peer(session.assistant_peer_id)
# AI peer can observe other peers — use assistant as observer.
ai_peer_obj = self._get_or_create_peer(session.assistant_peer_id)
if target_peer_id == session.assistant_peer_id:
result = ai_peer_obj.chat(query, reasoning_level=level) or ""
else:
ai_peer_obj = self._get_or_create_peer(session.assistant_peer_id)
result = ai_peer_obj.chat(
query,
target=session.user_peer_id,
target=target_peer_id,
reasoning_level=level,
) or ""
else:
# AI can't observe others — each peer queries self
peer_id = session.assistant_peer_id if peer == "ai" else session.user_peer_id
target_peer = self._get_or_create_peer(peer_id)
# Without cross-observation, each peer queries its own context.
target_peer = self._get_or_create_peer(target_peer_id)
result = target_peer.chat(query, reasoning_level=level) or ""
# Apply Hermes-side char cap before caching
@ -666,7 +645,7 @@ class HonchoSessionManager:
result: dict[str, str] = {}
try:
user_ctx = self._fetch_peer_context(session.user_peer_id)
user_ctx = self._fetch_peer_context(session.user_peer_id, target=session.user_peer_id)
result["representation"] = user_ctx["representation"]
result["card"] = "\n".join(user_ctx["card"])
except Exception as e:
@ -674,7 +653,7 @@ class HonchoSessionManager:
# Also fetch AI peer's own representation so Hermes knows itself.
try:
ai_ctx = self._fetch_peer_context(session.assistant_peer_id)
ai_ctx = self._fetch_peer_context(session.assistant_peer_id, target=session.assistant_peer_id)
result["ai_representation"] = ai_ctx["representation"]
result["ai_card"] = "\n".join(ai_ctx["card"])
except Exception as e:
@ -862,7 +841,7 @@ class HonchoSessionManager:
return [str(item) for item in card if item]
return [str(card)]
def _fetch_peer_card(self, peer_id: str) -> list[str]:
def _fetch_peer_card(self, peer_id: str, *, target: str | None = None) -> list[str]:
"""Fetch a peer card directly from the peer object.
This avoids relying on session.context(), which can return an empty
@ -872,22 +851,33 @@ class HonchoSessionManager:
peer = self._get_or_create_peer(peer_id)
getter = getattr(peer, "get_card", None)
if callable(getter):
return self._normalize_card(getter())
return self._normalize_card(getter(target=target) if target is not None else getter())
legacy_getter = getattr(peer, "card", None)
if callable(legacy_getter):
return self._normalize_card(legacy_getter())
return self._normalize_card(legacy_getter(target=target) if target is not None else legacy_getter())
return []
def _fetch_peer_context(self, peer_id: str, search_query: str | None = None) -> dict[str, Any]:
def _fetch_peer_context(
self,
peer_id: str,
search_query: str | None = None,
*,
target: str | None = None,
) -> dict[str, Any]:
"""Fetch representation + peer card directly from a peer object."""
peer = self._get_or_create_peer(peer_id)
representation = ""
card: list[str] = []
try:
ctx = peer.context(search_query=search_query) if search_query else peer.context()
context_kwargs: dict[str, Any] = {}
if target is not None:
context_kwargs["target"] = target
if search_query is not None:
context_kwargs["search_query"] = search_query
ctx = peer.context(**context_kwargs) if context_kwargs else peer.context()
representation = (
getattr(ctx, "representation", None)
or getattr(ctx, "peer_representation", None)
@ -899,24 +889,106 @@ class HonchoSessionManager:
if not representation:
try:
representation = peer.representation() or ""
representation = (
peer.representation(target=target) if target is not None else peer.representation()
) or ""
except Exception as e:
logger.debug("Direct peer.representation() failed for '%s': %s", peer_id, e)
if not card:
try:
card = self._fetch_peer_card(peer_id)
card = self._fetch_peer_card(peer_id, target=target)
except Exception as e:
logger.debug("Direct peer card fetch failed for '%s': %s", peer_id, e)
return {"representation": representation, "card": card}
def get_peer_card(self, session_key: str) -> list[str]:
def get_session_context(self, session_key: str, peer: str = "user") -> dict[str, Any]:
"""Fetch full session context from Honcho including summary.
Uses the session-level context() API which returns summary,
peer_representation, peer_card, and messages.
"""
Fetch the user peer's card — a curated list of key facts.
session = self._cache.get(session_key)
if not session:
return {}
honcho_session = self._sessions_cache.get(session.honcho_session_id)
if not honcho_session:
# Fall back to peer-level context
return self._fetch_peer_context(session.user_peer_id, target=session.user_peer_id)
try:
peer_id = self._resolve_peer_id(session, peer)
ctx = honcho_session.context(
summary=True,
peer_target=peer_id,
peer_perspective=session.user_peer_id if peer == "user" else session.assistant_peer_id,
)
result: dict[str, Any] = {}
# Summary
if ctx.summary:
result["summary"] = ctx.summary.content
# Peer representation and card
if ctx.peer_representation:
result["representation"] = ctx.peer_representation
if ctx.peer_card:
result["card"] = "\n".join(ctx.peer_card)
# Messages (last N for context)
if ctx.messages:
recent = ctx.messages[-10:] # last 10 messages
result["recent_messages"] = [
{"role": m.role, "content": m.content[:500]}
for m in recent
]
return result
except Exception as e:
logger.warning("Session context fetch failed: %s", e)
return {}
def _resolve_peer_id(self, session: HonchoSession, peer: str | None) -> str | None:
    """Map a peer alias ("user"/"ai") or an explicit peer ID to a Honcho peer ID.

    A None/blank *peer* falls back to the session's user peer. Aliases are
    compared after sanitization so callers may pass either form; anything that
    is not a known alias is treated as an already-explicit peer ID.
    """
    raw = (peer or "user").strip()
    if not raw:
        # Whitespace-only input: default to the user peer.
        return session.user_peer_id
    sanitized = self._sanitize_id(raw)
    # Check aliases in a fixed order so "user" wins any pathological collision.
    for alias, resolved in (
        ("user", session.user_peer_id),
        ("ai", session.assistant_peer_id),
    ):
        if sanitized == self._sanitize_id(alias):
            return resolved
    return sanitized
def _resolve_observer_target(
    self,
    session: HonchoSession,
    peer: str | None,
) -> tuple[str, str | None]:
    """Resolve (observer_peer_id, target_peer_id) for context/search/profile queries.

    The assistant always observes itself directly. It observes other peers only
    when cross-observation is enabled; otherwise each peer queries its own
    context. A target of None means "the observer queries itself".
    """
    resolved = self._resolve_peer_id(session, peer)
    if resolved is None:
        # Unresolvable alias: fall back to the user peer querying itself.
        return session.user_peer_id, None
    assistant_id = session.assistant_peer_id
    if resolved == assistant_id:
        return assistant_id, assistant_id
    # Non-assistant target: route through the assistant only with cross-observation.
    return (assistant_id, resolved) if self._ai_observe_others else (resolved, None)
def get_peer_card(self, session_key: str, peer: str = "user") -> list[str]:
"""
Fetch a peer card — a curated list of key facts.
Fast, no LLM reasoning. Returns raw structured facts Honcho has
inferred about the user (name, role, preferences, patterns).
inferred about the target peer (name, role, preferences, patterns).
Empty list if unavailable.
"""
session = self._cache.get(session_key)
@ -924,12 +996,19 @@ class HonchoSessionManager:
return []
try:
return self._fetch_peer_card(session.user_peer_id)
observer_peer_id, target_peer_id = self._resolve_observer_target(session, peer)
return self._fetch_peer_card(observer_peer_id, target=target_peer_id)
except Exception as e:
logger.debug("Failed to fetch peer card from Honcho: %s", e)
return []
def search_context(self, session_key: str, query: str, max_tokens: int = 800) -> str:
def search_context(
self,
session_key: str,
query: str,
max_tokens: int = 800,
peer: str = "user",
) -> str:
"""
Semantic search over Honcho session context.
@ -941,6 +1020,7 @@ class HonchoSessionManager:
session_key: Session to search against.
query: Search query for semantic matching.
max_tokens: Token budget for returned content.
peer: Peer alias or explicit peer ID to search about.
Returns:
Relevant context excerpts as a string, or empty string if none.
@ -950,7 +1030,13 @@ class HonchoSessionManager:
return ""
try:
ctx = self._fetch_peer_context(session.user_peer_id, search_query=query)
observer_peer_id, target = self._resolve_observer_target(session, peer)
ctx = self._fetch_peer_context(
observer_peer_id,
search_query=query,
target=target,
)
parts = []
if ctx["representation"]:
parts.append(ctx["representation"])
@ -962,16 +1048,17 @@ class HonchoSessionManager:
logger.debug("Honcho search_context failed: %s", e)
return ""
def create_conclusion(self, session_key: str, content: str) -> bool:
"""Write a conclusion about the user back to Honcho.
def create_conclusion(self, session_key: str, content: str, peer: str = "user") -> bool:
"""Write a conclusion about a target peer back to Honcho.
Conclusions are facts the AI peer observes about the user
preferences, corrections, clarifications, project context.
They feed into the user's peer card and representation.
Conclusions are facts a peer observes about another peer or itself —
preferences, corrections, clarifications, and project context.
They feed into the target peer's card and representation.
Args:
session_key: Session to associate the conclusion with.
content: The conclusion text (e.g. "User prefers dark mode").
content: The conclusion text.
peer: Peer alias or explicit peer ID. "user" is the default alias.
Returns:
True on success, False on failure.
@ -985,25 +1072,87 @@ class HonchoSessionManager:
return False
try:
if self._ai_observe_others:
# AI peer creates conclusion about user (cross-observation)
target_peer_id = self._resolve_peer_id(session, peer)
if target_peer_id is None:
logger.warning("Could not resolve conclusion peer '%s' for session '%s'", peer, session_key)
return False
if target_peer_id == session.assistant_peer_id:
assistant_peer = self._get_or_create_peer(session.assistant_peer_id)
conclusions_scope = assistant_peer.conclusions_of(session.user_peer_id)
conclusions_scope = assistant_peer.conclusions_of(session.assistant_peer_id)
elif self._ai_observe_others:
assistant_peer = self._get_or_create_peer(session.assistant_peer_id)
conclusions_scope = assistant_peer.conclusions_of(target_peer_id)
else:
# AI can't observe others — user peer creates self-conclusion
user_peer = self._get_or_create_peer(session.user_peer_id)
conclusions_scope = user_peer.conclusions_of(session.user_peer_id)
target_peer = self._get_or_create_peer(target_peer_id)
conclusions_scope = target_peer.conclusions_of(target_peer_id)
conclusions_scope.create([{
"content": content.strip(),
"session_id": session.honcho_session_id,
}])
logger.info("Created conclusion for %s: %s", session_key, content[:80])
logger.info("Created conclusion about %s for %s: %s", target_peer_id, session_key, content[:80])
return True
except Exception as e:
logger.error("Failed to create conclusion: %s", e)
return False
def delete_conclusion(self, session_key: str, conclusion_id: str, peer: str = "user") -> bool:
    """Delete a conclusion by ID. Use only for PII removal.

    Args:
        session_key: Session key for peer resolution.
        conclusion_id: The conclusion ID to delete.
        peer: Peer alias or explicit peer ID.

    Returns:
        True on success, False on failure.
    """
    session = self._cache.get(session_key)
    if not session:
        return False
    try:
        target_peer_id = self._resolve_peer_id(session, peer)
        # Guard against an unresolvable peer (consistent with create_conclusion):
        # _resolve_peer_id is annotated str | None, and conclusions_of(None)
        # would otherwise fail inside the broad except below, masking the cause.
        if target_peer_id is None:
            logger.warning("Could not resolve conclusion peer '%s' for session '%s'", peer, session_key)
            return False
        if target_peer_id == session.assistant_peer_id:
            # Assistant deletes its own self-conclusions.
            observer = self._get_or_create_peer(session.assistant_peer_id)
            scope = observer.conclusions_of(session.assistant_peer_id)
        elif self._ai_observe_others:
            # Cross-observation: the assistant owns conclusions about other peers.
            observer = self._get_or_create_peer(session.assistant_peer_id)
            scope = observer.conclusions_of(target_peer_id)
        else:
            # Without cross-observation, each peer owns its self-conclusions.
            target_peer = self._get_or_create_peer(target_peer_id)
            scope = target_peer.conclusions_of(target_peer_id)
        scope.delete(conclusion_id)
        logger.info("Deleted conclusion %s for %s", conclusion_id, session_key)
        return True
    except Exception as e:
        logger.error("Failed to delete conclusion %s: %s", conclusion_id, e)
        return False
def set_peer_card(self, session_key: str, card: list[str], peer: str = "user") -> list[str] | None:
    """Update a peer's card.

    Args:
        session_key: Session key for peer resolution.
        card: New peer card as list of fact strings.
        peer: Peer alias or explicit peer ID.

    Returns:
        Updated card on success, None on failure.
    """
    session = self._cache.get(session_key)
    if not session:
        return None
    try:
        peer_id = self._resolve_peer_id(session, peer)
        # Guard against an unresolvable peer (consistent with create_conclusion):
        # _resolve_peer_id is annotated str | None, and _get_or_create_peer(None)
        # would otherwise fail inside the broad except below, masking the cause.
        if peer_id is None:
            logger.warning("Could not resolve card peer '%s' for session '%s'", peer, session_key)
            return None
        peer_obj = self._get_or_create_peer(peer_id)
        result = peer_obj.set_card(card)
        logger.info("Updated peer card for %s (%d facts)", peer_id, len(card))
        return result
    except Exception as e:
        logger.error("Failed to set peer card: %s", e)
        return None
def seed_ai_identity(self, session_key: str, content: str, source: str = "manual") -> bool:
"""
Seed the AI peer's Honcho representation from text content.
@ -1061,7 +1210,7 @@ class HonchoSessionManager:
return {"representation": "", "card": ""}
try:
ctx = self._fetch_peer_context(session.assistant_peer_id)
ctx = self._fetch_peer_context(session.assistant_peer_id, target=session.assistant_peer_id)
return {
"representation": ctx["representation"] or "",
"card": "\n".join(ctx["card"]),