refactor(honcho): rename query_user_context to honcho_context

Consistent naming: all honcho tools now prefixed with honcho_
(honcho_context, honcho_search, honcho_profile, honcho_conclude).
This commit is contained in:
Erosika 2026-03-09 17:59:30 -04:00
parent 792be0e8e3
commit 0cb639d472
5 changed files with 18 additions and 18 deletions

View file

@@ -398,7 +398,7 @@ OPTIONAL_ENV_VARS = {
"description": "Honcho API key for AI-native persistent memory",
"prompt": "Honcho API key",
"url": "https://app.honcho.dev",
"tools": ["query_user_context"],
"tools": ["honcho_context"],
"password": True,
"category": "tool",
},

View file

@@ -196,7 +196,7 @@ def cmd_setup(args) -> None:
print(f" Mode: {_mode_str}")
print(f" Frequency: {hcfg.write_frequency}")
print(f"\n Tools available in chat:")
print(f" query_user_context — ask Honcho a question about you (LLM-synthesized)")
print(f" honcho_context — ask Honcho a question about you (LLM-synthesized)")
print(f" honcho_search — semantic search over your history (no LLM)")
print(f" honcho_profile — your peer card, key facts (no LLM)")
print(f"\n Other commands:")
@@ -707,7 +707,7 @@ def cmd_migrate(args) -> None:
print(" automatically. Files become the seed, not the live store.")
print()
print(" Tool surface (available to the agent during conversation)")
print(" query_user_context — ask Honcho a question, get a synthesized answer (LLM)")
print(" honcho_context — ask Honcho a question, get a synthesized answer (LLM)")
print(" honcho_search — semantic search over stored context (no LLM)")
print(" honcho_profile — fast peer card snapshot (no LLM)")
print()

View file

@@ -604,7 +604,7 @@ class AIAgent:
# all memory retrieval comes from the pre-warmed system prompt.
if hcfg.recall_mode != "context":
# Rebuild tool definitions now that Honcho check_fn will pass.
# (Tools were built before Honcho init, so query_user_context
# (Tools were built before Honcho init, so honcho_context
# was filtered out by _check_honcho_available() returning False.)
self.tools = get_tool_definitions(
enabled_toolsets=enabled_toolsets,
@@ -1596,7 +1596,7 @@ class AIAgent:
elif recall_mode == "tools":
honcho_block += (
"Memory tools:\n"
" query_user_context <question> — ask Honcho a question, LLM-synthesized answer\n"
" honcho_context <question> — ask Honcho a question, LLM-synthesized answer\n"
" honcho_search <query> — semantic search, raw excerpts, no LLM\n"
" honcho_profile — user's peer card, key facts, no LLM\n"
" honcho_conclude <conclusion> — write a fact about the user to memory\n"
@@ -1609,7 +1609,7 @@ class AIAgent:
"any tools. Only call memory tools when you need information beyond what is "
"already present in the Honcho Memory section.\n"
"Memory tools:\n"
" query_user_context <question> — ask Honcho a question, LLM-synthesized answer\n"
" honcho_context <question> — ask Honcho a question, LLM-synthesized answer\n"
" honcho_search <query> — semantic search, raw excerpts, no LLM\n"
" honcho_profile — user's peer card, key facts, no LLM\n"
" honcho_conclude <conclusion> — write a fact about the user to memory\n"

View file

@@ -2,11 +2,11 @@
Registers three complementary tools, ordered by capability:
query_user_context dialectic Q&A (LLM-powered, direct answers)
honcho_context dialectic Q&A (LLM-powered, direct answers)
honcho_search semantic search (fast, no LLM, raw excerpts)
honcho_profile peer card (fast, no LLM, structured facts)
Use query_user_context when you need Honcho to synthesize an answer.
Use honcho_context when you need Honcho to synthesize an answer.
Use honcho_search or honcho_profile when you want raw data to reason
over yourself.
@@ -58,7 +58,7 @@ _PROFILE_SCHEMA = {
"about them (name, role, preferences, communication style, patterns). "
"Fast, no LLM reasoning, minimal cost. "
"Use this at conversation start or when you need a quick factual snapshot. "
"Use query_user_context instead when you need Honcho to synthesize an answer."
"Use honcho_context instead when you need Honcho to synthesize an answer."
),
"parameters": {
"type": "object",
@@ -88,9 +88,9 @@ _SEARCH_SCHEMA = {
"description": (
"Semantic search over Honcho's stored context about the user. "
"Returns raw excerpts ranked by relevance to your query — no LLM synthesis. "
"Cheaper and faster than query_user_context. "
"Cheaper and faster than honcho_context. "
"Good when you want to find specific past facts and reason over them yourself. "
"Use query_user_context when you need a direct synthesized answer."
"Use honcho_context when you need a direct synthesized answer."
),
"parameters": {
"type": "object",
@@ -126,10 +126,10 @@ def _handle_honcho_search(args: dict, **kw) -> str:
return json.dumps({"error": f"Failed to search context: {e}"})
# ── query_user_context (dialectic — LLM-powered) ──
# ── honcho_context (dialectic — LLM-powered) ──
_QUERY_SCHEMA = {
"name": "query_user_context",
"name": "honcho_context",
"description": (
"Ask Honcho a natural language question about the user and get a synthesized answer. "
"Uses Honcho's LLM (dialectic reasoning) — higher cost than honcho_profile or honcho_search. "
@@ -150,7 +150,7 @@ _QUERY_SCHEMA = {
}
def _handle_query_user_context(args: dict, **kw) -> str:
def _handle_honcho_context(args: dict, **kw) -> str:
query = args.get("query", "")
if not query:
return json.dumps({"error": "Missing required parameter: query"})
@@ -228,10 +228,10 @@ registry.register(
)
registry.register(
name="query_user_context",
name="honcho_context",
toolset="honcho",
schema=_QUERY_SCHEMA,
handler=_handle_query_user_context,
handler=_handle_honcho_context,
check_fn=_check_honcho_available,
)

View file

@@ -61,7 +61,7 @@ _HERMES_CORE_TOOLS = [
# Cross-platform messaging (gated on gateway running via check_fn)
"send_message",
# Honcho user context (gated on honcho being active via check_fn)
"query_user_context",
"honcho_context",
# Home Assistant smart home control (gated on HASS_TOKEN via check_fn)
"ha_list_entities", "ha_get_state", "ha_list_services", "ha_call_service",
]
@@ -192,7 +192,7 @@ TOOLSETS = {
"honcho": {
"description": "Honcho AI-native memory for persistent cross-session user modeling",
"tools": ["query_user_context"],
"tools": ["honcho_context"],
"includes": []
},