Merge pull request #15755 from NousResearch/bb/tui-model-flag

fix(tui): honor launch model overrides
This commit is contained in:
brooklyn! 2026-04-25 14:30:26 -05:00 committed by GitHub
commit 283c8fd6e2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 626 additions and 235 deletions

View file

@ -103,7 +103,8 @@ COMMAND_REGISTRY: list[CommandDef] = [
# Configuration
CommandDef("config", "Show current configuration", "Configuration",
cli_only=True),
CommandDef("model", "Switch model for this session", "Configuration", args_hint="[model] [--provider name] [--global]"),
CommandDef("model", "Switch model for this session", "Configuration",
aliases=("provider",), args_hint="[model] [--provider name] [--global]"),
CommandDef("gquota", "Show Google Gemini Code Assist quota usage", "Info",
cli_only=True),

View file

@ -1028,7 +1028,12 @@ def _make_tui_argv(tui_dir: Path, tui_dev: bool) -> tuple[list[str], Path]:
return [node, str(root / "dist" / "entry.js")], root
def _launch_tui(resume_session_id: Optional[str] = None, tui_dev: bool = False):
def _launch_tui(
resume_session_id: Optional[str] = None,
tui_dev: bool = False,
model: Optional[str] = None,
provider: Optional[str] = None,
):
"""Replace current process with the TUI."""
tui_dir = PROJECT_ROOT / "ui-tui"
@ -1038,6 +1043,12 @@ def _launch_tui(resume_session_id: Optional[str] = None, tui_dev: bool = False):
)
env.setdefault("HERMES_PYTHON", sys.executable)
env.setdefault("HERMES_CWD", os.getcwd())
if model:
env["HERMES_MODEL"] = model
env["HERMES_INFERENCE_MODEL"] = model
if provider:
env["HERMES_TUI_PROVIDER"] = provider
env["HERMES_INFERENCE_PROVIDER"] = provider
# Guarantee an 8GB V8 heap + exposed GC for the TUI. Default node cap is
# ~1.54GB depending on version and can fatal-OOM on long sessions with
# large transcripts / reasoning blobs. Token-level merge: respect any
@ -1176,6 +1187,8 @@ def cmd_chat(args):
_launch_tui(
getattr(args, "resume", None),
tui_dev=getattr(args, "tui_dev", False),
model=getattr(args, "model", None),
provider=getattr(args, "provider", None),
)
# Import and run the CLI
@ -6913,7 +6926,7 @@ For more help on a command:
default=None,
help=(
"Model override for this invocation (e.g. anthropic/claude-sonnet-4.6). "
"Applies to -z/--oneshot. Also settable via HERMES_INFERENCE_MODEL env var."
"Applies to -z/--oneshot and --tui. Also settable via HERMES_INFERENCE_MODEL env var."
),
)
parser.add_argument(
@ -6921,7 +6934,7 @@ For more help on a command:
default=None,
help=(
"Provider override for this invocation (e.g. openrouter, anthropic). "
"Applies to -z/--oneshot. Also settable via HERMES_INFERENCE_PROVIDER env var."
"Applies to -z/--oneshot and --tui. Also settable via HERMES_INFERENCE_PROVIDER env var."
),
)
parser.add_argument(

View file

@ -1379,27 +1379,93 @@ def curated_models_for_provider(
return [(m, "") for m in models]
def detect_provider_for_model(
def _provider_keys(provider: str) -> set[str]:
    """Return the non-empty lowercase lookup keys for ``provider``.

    Includes both the raw (trimmed, lowercased) spelling and the
    ``normalize_provider`` form so catalog lookups match either one.
    """
    raw = (provider or "").strip().lower()
    keys: set[str] = set()
    for candidate in (raw, normalize_provider(provider)):
        if candidate:
            keys.add(candidate)
    return keys
def _model_in_provider_catalog(name_lower: str, providers: set[str]) -> bool:
    """True when ``name_lower`` case-insensitively matches a model listed in
    the static catalog (``_PROVIDER_MODELS``) of any provider in ``providers``."""
    for provider in providers:
        for model in _PROVIDER_MODELS.get(provider, []):
            if model.lower() == name_lower:
                return True
    return False
_AGGREGATOR_PROVIDERS = frozenset(
{"nous", "openrouter", "ai-gateway", "copilot", "kilocode"}
)
def _resolve_static_model_alias(
    name_lower: str,
    current_keys: set[str],
) -> Optional[tuple[str, str]]:
    """Resolve short aliases (e.g. sonnet/opus) using static catalogs only.

    Looks ``name_lower`` up in ``MODEL_ALIASES`` and, if it names a known
    model identity, scans the static ``_PROVIDER_MODELS`` catalogs for the
    first model whose id starts with the alias prefix. Returns
    ``(provider_id, model_id)`` or ``None`` when the alias is unknown or no
    catalog entry matches. Performs no network lookups.
    """
    # Import lazily and tolerate failure so this helper degrades to "no
    # alias match" instead of breaking model detection entirely.
    try:
        from hermes_cli.model_switch import MODEL_ALIASES
    except Exception:
        return None
    identity = MODEL_ALIASES.get(name_lower)
    if identity is None:
        return None
    vendor = identity.vendor
    family = identity.family

    def _match(provider: str) -> Optional[str]:
        # First catalog model whose id starts with the alias prefix.
        # Aggregator catalogs use vendor-qualified ids ("vendor/family");
        # direct providers list bare family names.
        models = _PROVIDER_MODELS.get(provider, [])
        if not models:
            return None
        prefix = (
            f"{vendor}/{family}"
            if provider in _AGGREGATOR_PROVIDERS
            else family
        ).lower()
        for model in models:
            if model.lower().startswith(prefix):
                return model
        return None

    # Priority 1: the provider(s) the session is already using.
    # NOTE(review): current_keys is a set, so with multiple current keys the
    # pick among them is iteration-order dependent — confirm intentional.
    for provider in current_keys:
        if matched := _match(provider):
            return provider, matched
    # Priority 2: any other direct (non-aggregator) provider catalog.
    for provider in _PROVIDER_MODELS:
        if provider in current_keys or provider in _AGGREGATOR_PROVIDERS:
            continue
        if matched := _match(provider):
            return provider, matched
    # Priority 3: aggregators, but only if one is already the current provider.
    for provider in _AGGREGATOR_PROVIDERS:
        if provider in current_keys and (matched := _match(provider)):
            return provider, matched
    return None
def detect_static_provider_for_model(
model_name: str,
current_provider: str,
) -> Optional[tuple[str, str]]:
"""Auto-detect the best provider for a model name.
"""Auto-detect a provider from static catalogs only.
Returns ``(provider_id, model_name)`` the model name may be remapped
(e.g. bare ``deepseek-chat`` ``deepseek/deepseek-chat`` for OpenRouter).
Returns ``(provider_id, model_name)``. The model name may be remapped
when a static alias or bare provider name resolves to a catalog default.
Returns ``None`` when no confident match is found.
Priority:
0. Bare provider name switch to that provider's default model
1. Direct provider with credentials (highest)
2. Direct provider without credentials remap to OpenRouter slug
3. OpenRouter catalog match
"""
name = (model_name or "").strip()
if not name:
return None
name_lower = name.lower()
current_keys = _provider_keys(current_provider)
alias_match = _resolve_static_model_alias(name_lower, current_keys)
if alias_match:
return alias_match
# --- Step 0: bare provider name typed as model ---
# If someone types `/model nous` or `/model anthropic`, treat it as a
@ -1412,64 +1478,49 @@ def detect_provider_for_model(
if (
resolved_provider in _PROVIDER_LABELS
and default_models
and resolved_provider != normalize_provider(current_provider)
and resolved_provider not in current_keys
):
return (resolved_provider, default_models[0])
# Aggregators list other providers' models — never auto-switch TO them
_AGGREGATORS = {"nous", "openrouter", "ai-gateway", "copilot", "kilocode"}
# If the model belongs to the current provider's catalog, don't suggest switching
current_models = _PROVIDER_MODELS.get(current_provider, [])
if any(name_lower == m.lower() for m in current_models):
if _model_in_provider_catalog(name_lower, current_keys):
return None
# --- Step 1: check static provider catalogs for a direct match ---
direct_match: Optional[str] = None
for pid, models in _PROVIDER_MODELS.items():
if pid == current_provider or pid in _AGGREGATORS:
if pid in current_keys or pid in _AGGREGATOR_PROVIDERS:
continue
if any(name_lower == m.lower() for m in models):
direct_match = pid
break
return (pid, name)
if direct_match:
# Check if we have credentials for this provider — env vars,
# credential pool, or auth store entries.
has_creds = False
try:
from hermes_cli.auth import PROVIDER_REGISTRY
pconfig = PROVIDER_REGISTRY.get(direct_match)
if pconfig:
for env_var in pconfig.api_key_env_vars:
if os.getenv(env_var, "").strip():
has_creds = True
break
except Exception:
pass
# Also check credential pool and auth store — covers OAuth,
# Claude Code tokens, and other non-env-var credentials (#10300).
if not has_creds:
try:
from agent.credential_pool import load_pool
pool = load_pool(direct_match)
if pool.has_credentials():
has_creds = True
except Exception:
pass
if not has_creds:
try:
from hermes_cli.auth import _load_auth_store
store = _load_auth_store()
if direct_match in store.get("providers", {}) or direct_match in store.get("credential_pool", {}):
has_creds = True
except Exception:
pass
return None
# Always return the direct provider match. If credentials are
# missing, the client init will give a clear error rather than
# silently routing through the wrong provider (#10300).
return (direct_match, name)
def detect_provider_for_model(
model_name: str,
current_provider: str,
) -> Optional[tuple[str, str]]:
"""Auto-detect the best provider for a model name.
Returns ``(provider_id, model_name)`` — the model name may be remapped
(e.g. bare ``deepseek-chat`` → ``deepseek/deepseek-chat`` for OpenRouter).
Returns ``None`` when no confident match is found.
Priority:
0. Bare provider name → switch to that provider's default model
1. Direct provider static catalog match
2. OpenRouter catalog match
"""
name = (model_name or "").strip()
if not name:
return None
static_match = detect_static_provider_for_model(name, current_provider)
if static_match:
return static_match
if _model_in_provider_catalog(name.lower(), _provider_keys(current_provider)):
return None
# --- Step 2: check OpenRouter catalog ---
# First try exact match (handles provider/model format)

View file

@ -256,6 +256,17 @@ class TestDetectProviderForModel:
"""Models belonging to the current provider should not trigger a switch."""
assert detect_provider_for_model("gpt-5.3-codex", "openai-codex") is None
def test_short_alias_resolves_to_static_model(self):
    """Short aliases (e.g. sonnet) should resolve without network lookups."""
    # Any call to the OpenRouter fetch would raise, proving the alias was
    # resolved purely from the static catalogs.
    with patch(
        "hermes_cli.models.fetch_openrouter_models",
        side_effect=AssertionError("network lookup should not run"),
    ):
        result = detect_provider_for_model("sonnet", "auto")
        assert result is not None
        assert result[0] == "anthropic"
        assert result[1].startswith("claude-sonnet")
def test_openrouter_slug_match(self):
"""Models in the OpenRouter catalog should be found."""
with patch("hermes_cli.models.fetch_openrouter_models", return_value=LIVE_OPENROUTER_MODELS):

View file

@ -1,4 +1,5 @@
from argparse import Namespace
from pathlib import Path
import sys
import types
@ -8,8 +9,11 @@ import pytest
def _args(**overrides):
base = {
"continue_last": None,
"model": None,
"provider": None,
"resume": None,
"tui": True,
"tui_dev": False,
}
base.update(overrides)
return Namespace(**base)
@ -31,7 +35,7 @@ def test_cmd_chat_tui_continue_uses_latest_tui_session(monkeypatch, main_mod):
calls.append(source)
return "20260408_235959_a1b2c3" if source == "tui" else None
def fake_launch(resume_session_id=None, tui_dev=False):
def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id
raise SystemExit(0)
@ -58,7 +62,7 @@ def test_cmd_chat_tui_continue_falls_back_to_latest_cli_session(monkeypatch, mai
return "20260408_235959_d4e5f6"
return None
def fake_launch(resume_session_id=None, tui_dev=False):
def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id
raise SystemExit(0)
@ -76,7 +80,7 @@ def test_cmd_chat_tui_continue_falls_back_to_latest_cli_session(monkeypatch, mai
def test_cmd_chat_tui_resume_resolves_title_before_launch(monkeypatch, main_mod):
captured = {}
def fake_launch(resume_session_id=None, tui_dev=False):
def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id
raise SystemExit(0)
@ -89,6 +93,60 @@ def test_cmd_chat_tui_resume_resolves_title_before_launch(monkeypatch, main_mod)
assert captured["resume"] == "20260409_000000_aa11bb"
def test_cmd_chat_tui_passes_model_and_provider(monkeypatch, main_mod):
    """cmd_chat must forward --model/--provider args through to _launch_tui."""
    captured = {}

    # Stand-in for _launch_tui: record its arguments, then exit like the
    # real launcher (which replaces/terminates the process).
    def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
        captured.update(
            {
                "model": model,
                "provider": provider,
                "resume": resume_session_id,
                "tui_dev": tui_dev,
            }
        )
        raise SystemExit(0)

    monkeypatch.setattr(main_mod, "_launch_tui", fake_launch)
    with pytest.raises(SystemExit):
        main_mod.cmd_chat(
            _args(model="anthropic/claude-sonnet-4.6", provider="anthropic")
        )
    # Overrides arrive verbatim; resume/tui_dev keep their defaults.
    assert captured == {
        "model": "anthropic/claude-sonnet-4.6",
        "provider": "anthropic",
        "resume": None,
        "tui_dev": False,
    }
def test_launch_tui_exports_model_and_provider(monkeypatch, main_mod):
    """_launch_tui must export the model/provider overrides as env vars."""
    captured = {}
    # Avoid touching the real TUI install: return a canned argv/root.
    monkeypatch.setattr(
        main_mod,
        "_make_tui_argv",
        lambda tui_dir, tui_dev: (["node", "dist/entry.js"], Path(".")),
    )

    # Capture the subprocess invocation; non-zero return makes the
    # launcher exit, which pytest.raises(SystemExit) absorbs below.
    def fake_call(argv, cwd=None, env=None):
        captured.update({"argv": argv, "cwd": cwd, "env": env})
        return 1

    monkeypatch.setattr(main_mod.subprocess, "call", fake_call)
    with pytest.raises(SystemExit):
        main_mod._launch_tui(model="nous/hermes-test", provider="nous")
    env = captured["env"]
    # Both the TUI-specific and generic inference env vars must be set.
    assert env["HERMES_MODEL"] == "nous/hermes-test"
    assert env["HERMES_INFERENCE_MODEL"] == "nous/hermes-test"
    assert env["HERMES_TUI_PROVIDER"] == "nous"
    assert env["HERMES_INFERENCE_PROVIDER"] == "nous"
def test_print_tui_exit_summary_includes_resume_and_token_totals(monkeypatch, capsys):
import hermes_cli.main as main_mod

View file

@ -83,6 +83,100 @@ def test_status_callback_accepts_single_message_argument():
)
def test_resolve_model_uses_inference_model_env(monkeypatch):
    """HERMES_INFERENCE_MODEL is honored (and stripped) when HERMES_MODEL is unset."""
    monkeypatch.delenv("HERMES_MODEL", raising=False)
    monkeypatch.setenv("HERMES_INFERENCE_MODEL", " anthropic/claude-sonnet-4.6\n")
    assert server._resolve_model() == "anthropic/claude-sonnet-4.6"
def test_resolve_model_strips_config_model(monkeypatch):
    """With no model env vars, config's model.default is used, whitespace-stripped."""
    monkeypatch.delenv("HERMES_MODEL", raising=False)
    monkeypatch.delenv("HERMES_INFERENCE_MODEL", raising=False)
    monkeypatch.setattr(
        server, "_load_cfg", lambda: {"model": {"default": " nous/hermes-test "}}
    )
    assert server._resolve_model() == "nous/hermes-test"
def test_startup_runtime_uses_tui_provider_env(monkeypatch):
    """HERMES_TUI_PROVIDER is treated as an explicit provider choice."""
    monkeypatch.setenv("HERMES_MODEL", "nous/hermes-test")
    monkeypatch.setenv("HERMES_TUI_PROVIDER", "nous")
    monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
    assert server._resolve_startup_runtime() == ("nous/hermes-test", "nous")
def test_startup_runtime_does_not_treat_inference_provider_as_explicit(monkeypatch):
    """HERMES_INFERENCE_PROVIDER alone must not be taken as an explicit provider."""
    monkeypatch.setenv("HERMES_MODEL", "nous/hermes-test")
    monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
    monkeypatch.setenv("HERMES_INFERENCE_PROVIDER", "nous")
    # Static detection finds nothing, so the provider must come back None.
    monkeypatch.setattr(
        "hermes_cli.models.detect_static_provider_for_model",
        lambda model, provider: None,
    )
    assert server._resolve_startup_runtime() == ("nous/hermes-test", None)
def test_startup_runtime_detects_provider_for_model_env(monkeypatch):
    """An explicit model env var triggers static provider detection."""
    monkeypatch.setenv("HERMES_MODEL", "sonnet")
    monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
    monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
    monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})

    # Assert the detector receives the raw model name and the config
    # provider, and that its remapped result is returned verbatim.
    def fake_detect(model, current_provider):
        assert model == "sonnet"
        assert current_provider == "auto"
        return "anthropic", "anthropic/claude-sonnet-4.6"

    monkeypatch.setattr(
        "hermes_cli.models.detect_static_provider_for_model", fake_detect
    )
    assert server._resolve_startup_runtime() == (
        "anthropic/claude-sonnet-4.6",
        "anthropic",
    )
def test_startup_runtime_resolves_short_alias_without_network(monkeypatch):
    """Short aliases resolve at startup via static catalogs — no network fetch."""
    monkeypatch.setenv("HERMES_MODEL", "sonnet")
    monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
    monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
    monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})
    # The lambda raises if the OpenRouter fetch is ever invoked
    # (generator-throw trick: lambdas cannot contain `raise`).
    monkeypatch.setattr(
        "hermes_cli.models.fetch_openrouter_models",
        lambda *_args, **_kwargs: (_ for _ in ()).throw(
            AssertionError("network lookup should not run")
        ),
    )
    model, provider = server._resolve_startup_runtime()
    assert provider == "anthropic"
    assert model.startswith("claude-sonnet")
def test_startup_runtime_does_not_call_network_detector(monkeypatch):
    """Startup must use the static detector, never the network-capable one."""
    monkeypatch.setenv("HERMES_MODEL", "sonnet")
    monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
    monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
    monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})
    # Any call to the full detector fails the test via generator-throw.
    monkeypatch.setattr(
        "hermes_cli.models.detect_provider_for_model",
        lambda *_args, **_kwargs: (_ for _ in ()).throw(
            AssertionError("network detector called")
        ),
    )
    model, provider = server._resolve_startup_runtime()
    assert model
    # Provider may resolve statically ("anthropic") or stay undetected (None).
    assert provider in {None, "anthropic"}
def _session(agent=None, **extra):
return {
"agent": agent if agent is not None else types.SimpleNamespace(),
@ -245,6 +339,14 @@ def test_setup_status_reports_provider_config(monkeypatch):
assert resp["result"]["provider_configured"] is False
def test_complete_slash_includes_provider_alias():
    """Slash completion for "/pro" must offer the "provider" alias."""
    resp = server.handle_request(
        {"id": "1", "method": "complete.slash", "params": {"text": "/pro"}}
    )
    assert any(item["text"] == "provider" for item in resp["result"]["items"])
def test_config_set_reasoning_updates_live_session_and_agent(tmp_path, monkeypatch):
monkeypatch.setattr(server, "_hermes_home", tmp_path)
agent = types.SimpleNamespace(reasoning_config=None)
@ -415,6 +517,57 @@ def test_config_set_model_syncs_inference_provider_env(monkeypatch):
assert os.environ["HERMES_INFERENCE_PROVIDER"] == "anthropic"
def test_config_set_model_syncs_tui_provider_env(monkeypatch):
    """A model switch via config.set must update HERMES_TUI_PROVIDER when it
    was already set, alongside the model env vars."""
    # Minimal agent double: starts on openai-codex, records switch_model args.
    class Agent:
        model = "gpt-5.3-codex"
        provider = "openai-codex"
        base_url = ""
        api_key = ""

        def switch_model(self, **kwargs):
            self.model = kwargs["new_model"]
            self.provider = kwargs["new_provider"]

    agent = Agent()
    server._sessions["sid"] = _session(agent=agent)
    # TUI provider env var pre-set: the handler should overwrite it.
    monkeypatch.setenv("HERMES_TUI_PROVIDER", "openai-codex")
    monkeypatch.setattr(server, "_restart_slash_worker", lambda session: None)
    monkeypatch.setattr(server, "_emit", lambda *args, **kwargs: None)

    # Canned successful switch result pointing at anthropic.
    def fake_switch_model(**kwargs):
        return types.SimpleNamespace(
            success=True,
            new_model="anthropic/claude-sonnet-4.6",
            target_provider="anthropic",
            api_key="key",
            base_url="https://api.anthropic.com",
            api_mode="anthropic_messages",
            warning_message="",
        )

    monkeypatch.setattr("hermes_cli.model_switch.switch_model", fake_switch_model)
    try:
        resp = server.handle_request(
            {
                "id": "1",
                "method": "config.set",
                "params": {
                    "session_id": "sid",
                    "key": "model",
                    "value": "anthropic/claude-sonnet-4.6 --provider anthropic",
                },
            }
        )
        assert resp["result"]["value"] == "anthropic/claude-sonnet-4.6"
        # Provider env flips to the new target; model env vars follow suit.
        assert os.environ["HERMES_TUI_PROVIDER"] == "anthropic"
        assert os.environ["HERMES_MODEL"] == "anthropic/claude-sonnet-4.6"
        assert os.environ["HERMES_INFERENCE_MODEL"] == "anthropic/claude-sonnet-4.6"
    finally:
        # Session registry is module-global state — always clean it up.
        server._sessions.clear()
def test_config_set_personality_rejects_unknown_name(monkeypatch):
monkeypatch.setattr(
server,

View file

@ -560,17 +560,55 @@ def resolve_skin() -> dict:
def _resolve_model() -> str:
env = os.environ.get("HERMES_MODEL", "")
env = (
os.environ.get("HERMES_MODEL", "")
or os.environ.get("HERMES_INFERENCE_MODEL", "")
).strip()
if env:
return env
m = _load_cfg().get("model", "")
if isinstance(m, dict):
return m.get("default", "")
return str(m.get("default", "") or "").strip()
if isinstance(m, str) and m:
return m
return m.strip()
return "anthropic/claude-sonnet-4"
def _resolve_startup_runtime() -> tuple[str, str | None]:
    """Resolve the (model, provider) pair to boot a session with.

    Provider precedence:
      1. HERMES_TUI_PROVIDER — an explicit user choice, returned as-is.
      2. Static provider detection, attempted only when the model itself was
         set explicitly via HERMES_MODEL / HERMES_INFERENCE_MODEL (may also
         remap the model, e.g. resolving a short alias).
      3. ``None`` — caller falls back to ambient provider resolution.

    HERMES_INFERENCE_PROVIDER is deliberately NOT treated as explicit; it
    only seeds the "current provider" hint for detection.
    """
    model = _resolve_model()
    explicit_provider = os.environ.get("HERMES_TUI_PROVIDER", "").strip()
    if explicit_provider:
        return model, explicit_provider
    explicit_model = (
        os.environ.get("HERMES_MODEL", "")
        or os.environ.get("HERMES_INFERENCE_MODEL", "")
    ).strip()
    if not explicit_model:
        # Model came from config/defaults; don't second-guess the provider.
        return model, None
    # Best-effort: any failure (import, config, detection) falls through to
    # the undetected (model, None) result rather than blocking startup.
    try:
        from hermes_cli.models import detect_static_provider_for_model

        cfg = _load_cfg().get("model") or {}
        current_provider = (
            (
                str(cfg.get("provider") or "").strip().lower()
                if isinstance(cfg, dict)
                else ""
            )
            or os.environ.get("HERMES_INFERENCE_PROVIDER", "").strip().lower()
            or "auto"
        )
        detected = detect_static_provider_for_model(explicit_model, current_provider)
        if detected:
            provider, detected_model = detected
            # Detection may remap the model (e.g. alias → full catalog id).
            return detected_model, provider
    except Exception:
        pass
    return model, None
def _write_config_key(key_path: str, value):
cfg = _load_cfg()
current = cfg
@ -736,12 +774,15 @@ def _apply_model_switch(sid: str, session: dict, raw_input: str) -> dict:
_emit("session.info", sid, _session_info(agent))
os.environ["HERMES_MODEL"] = result.new_model
os.environ["HERMES_INFERENCE_MODEL"] = result.new_model
# Keep the process-level provider env var in sync with the user's explicit
# choice so any ambient re-resolution (credential pool refresh, compressor
# rebuild, aux clients) resolves to the new provider instead of the
# original one persisted in config or env.
if result.target_provider:
os.environ["HERMES_INFERENCE_PROVIDER"] = result.target_provider
if os.environ.get("HERMES_TUI_PROVIDER"):
os.environ["HERMES_TUI_PROVIDER"] = result.target_provider
if persist_global:
_persist_model_switch(result)
return {"value": result.new_model, "warning": result.warning_message or ""}
@ -1277,9 +1318,13 @@ def _make_agent(sid: str, key: str, session_id: str | None = None):
cfg = _load_cfg()
system_prompt = ((cfg.get("agent") or {}).get("system_prompt", "") or "").strip()
runtime = resolve_runtime_provider(requested=None)
model, requested_provider = _resolve_startup_runtime()
runtime = resolve_runtime_provider(
requested=requested_provider,
target_model=model or None,
)
return AIAgent(
model=_resolve_model(),
model=model,
provider=runtime.get("provider"),
base_url=runtime.get("base_url"),
api_key=runtime.get("api_key"),

View file

@ -53,7 +53,11 @@ export function AlternateScreen(t0: Props) {
}
writeRaw(
ENTER_ALT_SCREEN + ERASE_SCROLLBACK + ERASE_SCREEN + CURSOR_HOME + (mouseTracking ? ENABLE_MOUSE_TRACKING : DISABLE_MOUSE_TRACKING)
ENTER_ALT_SCREEN +
ERASE_SCROLLBACK +
ERASE_SCREEN +
CURSOR_HOME +
(mouseTracking ? ENABLE_MOUSE_TRACKING : DISABLE_MOUSE_TRACKING)
)
ink?.setAltScreenActive(true, mouseTracking)

View file

@ -323,27 +323,39 @@ const measureTextNode = function (
widthMode: LayoutMeasureMode
): { width: number; height: number } {
const elem = node.nodeName !== '#text' ? (node as DOMElement) : node.parentNode
if (elem && elem.nodeName === 'ink-text') {
let cache = elem._textMeasureCache
if (!cache) {
cache = { gen: 0, entries: new Map() }
elem._textMeasureCache = cache
}
const key = `${width}|${widthMode}`
const hit = cache.entries.get(key)
if (hit && hit._gen === cache.gen) {
return hit.result
}
const result = computeTextMeasure(node, width, widthMode)
// Enforce cap with FIFO eviction to avoid unbounded growth during
// pathological frames where yoga probes many widths.
if (cache.entries.size >= MEASURE_CACHE_CAP) {
const firstKey = cache.entries.keys().next().value
cache.entries.delete(firstKey)
if (firstKey !== undefined) {
cache.entries.delete(firstKey)
}
}
cache.entries.set(key, { _gen: cache.gen, result })
return result
}
return computeTextMeasure(node, width, widthMode)
}
@ -475,6 +487,7 @@ export const clearYogaNodeReferences = (node: DOMElement | TextNode): void => {
for (const child of node.childNodes) {
clearYogaNodeReferences(child)
}
node._textMeasureCache = undefined
}

View file

@ -9,18 +9,21 @@ describe('shouldEmitClipboardSequence', () => {
})
it('keeps OSC enabled for remote or plain local terminals', () => {
expect(shouldEmitClipboardSequence({ SSH_CONNECTION: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)).toBe(
true
)
expect(
shouldEmitClipboardSequence({ SSH_CONNECTION: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)
).toBe(true)
expect(shouldEmitClipboardSequence({ TERM: 'xterm-256color' } as NodeJS.ProcessEnv)).toBe(true)
})
it('honors explicit env override', () => {
expect(shouldEmitClipboardSequence({ HERMES_TUI_CLIPBOARD_OSC52: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)).toBe(
true
)
expect(shouldEmitClipboardSequence({ HERMES_TUI_COPY_OSC52: '0', TERM: 'xterm-256color' } as NodeJS.ProcessEnv)).toBe(
false
)
expect(
shouldEmitClipboardSequence({
HERMES_TUI_CLIPBOARD_OSC52: '1',
TMUX: '/tmp/tmux-1/default,1,0'
} as NodeJS.ProcessEnv)
).toBe(true)
expect(
shouldEmitClipboardSequence({ HERMES_TUI_COPY_OSC52: '0', TERM: 'xterm-256color' } as NodeJS.ProcessEnv)
).toBe(false)
})
})

View file

@ -226,7 +226,10 @@ describe('createGatewayEventHandler', () => {
const inlineDiff = '--- a/foo.ts\n+++ b/foo.ts\n@@\n-old\n+new'
const assistantText = 'Done. Clean swap:\n\n```diff\n-old\n+new\n```'
onEvent({ payload: { inline_diff: inlineDiff, summary: 'patched', tool_id: 'tool-1' }, type: 'tool.complete' } as any)
onEvent({
payload: { inline_diff: inlineDiff, summary: 'patched', tool_id: 'tool-1' },
type: 'tool.complete'
} as any)
onEvent({ payload: { text: assistantText }, type: 'message.complete' } as any)
expect(appended).toHaveLength(1)

View file

@ -17,6 +17,14 @@ describe('createSlashHandler', () => {
expect(getOverlayState().picker).toBe(true)
})
it('treats /provider as a local /model alias', () => {
const ctx = buildCtx()
expect(createSlashHandler(ctx)('/provider')).toBe(true)
expect(getOverlayState().modelPicker).toBe(true)
expect(ctx.gateway.gw.request).not.toHaveBeenCalled()
})
it('opens the skills hub locally for bare /skills', () => {
const ctx = buildCtx()
@ -118,9 +126,7 @@ describe('createSlashHandler', () => {
const ctx = buildCtx()
createSlashHandler(ctx)('/details tools blink')
expect(getUiState().sections.tools).toBeUndefined()
expect(ctx.transcript.sys).toHaveBeenCalledWith(
'usage: /details <section> [hidden|collapsed|expanded|reset]'
)
expect(ctx.transcript.sys).toHaveBeenCalledWith('usage: /details <section> [hidden|collapsed|expanded|reset]')
})
it('shows tool enable usage when names are missing', () => {

View file

@ -1,6 +1,6 @@
import { describe, expect, it } from 'vitest'
import { isSectionName, parseDetailsMode, resolveSections, sectionMode, SECTION_NAMES } from '../domain/details.js'
import { isSectionName, parseDetailsMode, resolveSections, SECTION_NAMES, sectionMode } from '../domain/details.js'
describe('parseDetailsMode', () => {
it('accepts the canonical modes case-insensitively', () => {

View file

@ -1,8 +1,8 @@
import { useStore } from '@nanostores/react'
import { GatewayProvider } from './app/gatewayContext.js'
import { useMainApp } from './app/useMainApp.js'
import { $uiState } from './app/uiStore.js'
import { useMainApp } from './app/useMainApp.js'
import { AppLayout } from './components/appLayout.js'
import type { GatewayClient } from './gatewayClient.js'

View file

@ -1,7 +1,7 @@
import { NO_CONFIRM_DESTRUCTIVE } from '../../../config/env.js'
import { dailyFortune, randomFortune } from '../../../content/fortunes.js'
import { HOTKEYS } from '../../../content/hotkeys.js'
import { SECTION_NAMES, isSectionName, nextDetailsMode, parseDetailsMode } from '../../../domain/details.js'
import { isSectionName, nextDetailsMode, parseDetailsMode, SECTION_NAMES } from '../../../domain/details.js'
import type {
ConfigGetValueResponse,
ConfigSetResponse,
@ -40,8 +40,10 @@ const flagFromArg = (arg: string, current: boolean): boolean | null => {
const RESET_WORDS = new Set(['reset', 'clear', 'default'])
const CYCLE_WORDS = new Set(['cycle', 'toggle'])
const DETAILS_USAGE =
'usage: /details [hidden|collapsed|expanded|cycle] or /details <section> [hidden|collapsed|expanded|reset]'
const DETAILS_SECTION_USAGE = 'usage: /details <section> [hidden|collapsed|expanded|reset]'
export const coreCommands: SlashCommand[] = [
@ -97,9 +99,7 @@ export const coreCommands: SlashCommand[] = [
}
patchUiState({ mouseTracking: next })
ctx.gateway
.rpc<ConfigSetResponse>('config.set', { key: 'mouse', value: next ? 'on' : 'off' })
.catch(() => {})
ctx.gateway.rpc<ConfigSetResponse>('config.set', { key: 'mouse', value: next ? 'on' : 'off' }).catch(() => {})
queueMicrotask(() => ctx.transcript.sys(`mouse tracking ${next ? 'on' : 'off'}`))
}
@ -178,7 +178,9 @@ export const coreCommands: SlashCommand[] = [
gateway
.rpc<ConfigGetValueResponse>('config.get', { key: 'details_mode' })
.then(r => {
if (ctx.stale()) return
if (ctx.stale()) {
return
}
const mode = parseDetailsMode(r?.value) ?? ui.detailsMode
patchUiState({ detailsMode: mode })

View file

@ -58,6 +58,7 @@ export const sessionCommands: SlashCommand[] = [
{
help: 'change or show model',
aliases: ['provider'],
name: 'model',
run: (arg, ctx) => {
if (ctx.session.guardBusySessionSwitch('change models')) {

View file

@ -5,18 +5,6 @@ import { runExternalSetup } from '../../setupHandoff.js'
import type { SlashCommand } from '../types.js'
export const setupCommands: SlashCommand[] = [
{
help: 'configure LLM provider + model (launches `hermes model`)',
name: 'provider',
run: (_arg, ctx) =>
void runExternalSetup({
args: ['model'],
ctx,
done: 'provider updated — starting session…',
launcher: launchHermesCommand,
suspend: withInkSuspended
})
},
{
help: 'run full setup wizard (launches `hermes setup`)',
name: 'setup',

View file

@ -300,6 +300,7 @@ class TurnController {
const hasDiffSegment = segments.some(msg => msg.kind === 'diff')
const detailsBelongBeforeDiff = hasDiffSegment && (tools.length > 0 || Boolean(savedReasoning))
const finalMessages = detailsBelongBeforeDiff
? insertBeforeFirstDiff(segments, {
kind: 'trail',

View file

@ -1,8 +1,8 @@
import { atom } from 'nanostores'
import { MOUSE_TRACKING } from '../config/env.js'
import { ZERO } from '../domain/usage.js'
import { DEFAULT_THEME } from '../theme.js'
import { MOUSE_TRACKING } from '../config/env.js'
import type { UiState } from './interfaces.js'

View file

@ -159,16 +159,14 @@ export function useInputHandlers(ctx: InputHandlerContext): InputHandlerResult {
voice.setProcessing(false)
}
gateway
.rpc<VoiceRecordResponse>('voice.record', { action })
.catch((e: Error) => {
// Revert optimistic UI on failure.
if (starting) {
voice.setRecording(false)
}
gateway.rpc<VoiceRecordResponse>('voice.record', { action }).catch((e: Error) => {
// Revert optimistic UI on failure.
if (starting) {
voice.setRecording(false)
}
actions.sys(`voice error: ${e.message}`)
})
actions.sys(`voice error: ${e.message}`)
})
}
useInput((ch, key) => {

View file

@ -640,14 +640,14 @@ export function useMainApp(gw: GatewayClient) {
const showProgressArea = anyPanelVisible
? Boolean(
ui.busy ||
turn.outcome ||
turn.streamPendingTools.length ||
turn.streamSegments.length ||
turn.subagents.length ||
turn.tools.length ||
turn.turnTrail.length ||
hasReasoning ||
turn.activity.length
turn.outcome ||
turn.streamPendingTools.length ||
turn.streamSegments.length ||
turn.subagents.length ||
turn.tools.length ||
turn.turnTrail.length ||
hasReasoning ||
turn.activity.length
)
: turn.activity.some(item => item.tone !== 'info')

View file

@ -218,11 +218,7 @@ export function StatusRule({
{voiceLabel ? (
<Text
color={
voiceLabel.startsWith('●')
? t.color.error
: voiceLabel.startsWith('◉')
? t.color.warn
: t.color.dim
voiceLabel.startsWith('●') ? t.color.error : voiceLabel.startsWith('◉') ? t.color.warn : t.color.dim
}
>
{' │ '}

View file

@ -9,6 +9,7 @@ import { $uiState } from '../app/uiStore.js'
import { FloatBox } from './appChrome.js'
import { MaskedPrompt } from './maskedPrompt.js'
import { ModelPicker } from './modelPicker.js'
import { OverlayHint } from './overlayControls.js'
import { ApprovalPrompt, ClarifyPrompt, ConfirmPrompt } from './prompts.js'
import { SessionPicker } from './sessionPicker.js'
import { SkillsHub } from './skillsHub.js'
@ -162,11 +163,11 @@ export function FloatingOverlays({
))}
<Box marginTop={1}>
<Text color={ui.theme.color.dim}>
<OverlayHint t={ui.theme}>
{overlay.pager.offset + pagerPageSize < overlay.pager.lines.length
? `↑↓/jk line · Enter/Space/PgDn page · b/PgUp back · g/G top/bottom · q close (${Math.min(overlay.pager.offset + pagerPageSize, overlay.pager.lines.length)}/${overlay.pager.lines.length})`
: `end · ↑↓/jk · b/PgUp back · g top · q close (${overlay.pager.lines.length} lines)`}
</Text>
? `↑↓/jk line · Enter/Space/PgDn page · b/PgUp back · g/G top/bottom · Esc/q close (${Math.min(overlay.pager.offset + pagerPageSize, overlay.pager.lines.length)}/${overlay.pager.lines.length})`
: `end · ↑↓/jk · b/PgUp back · g top · Esc/q close (${overlay.pager.lines.length} lines)`}
</OverlayHint>
</Box>
</Box>
</FloatBox>

View file

@ -1,8 +1,8 @@
import { Ansi, Box, NoSelect, Text } from '@hermes/ink'
import { memo } from 'react'
import { sectionMode } from '../domain/details.js'
import { LONG_MSG } from '../config/limits.js'
import { sectionMode } from '../domain/details.js'
import { userDisplay } from '../domain/messages.js'
import { ROLE } from '../domain/roles.js'
import { compactPreview, hasAnsi, isPasteBackedText, stripAnsi } from '../lib/text.js'
@ -72,8 +72,7 @@ export const MessageLine = memo(function MessageLine({
const { body, glyph, prefix } = ROLE[msg.role](t)
const showDetails =
(toolsMode !== 'hidden' && Boolean(msg.tools?.length)) ||
(thinkingMode !== 'hidden' && Boolean(thinking))
(toolsMode !== 'hidden' && Boolean(msg.tools?.length)) || (thinkingMode !== 'hidden' && Boolean(thinking))
const content = (() => {
if (msg.kind === 'slash') {

View file

@ -7,18 +7,12 @@ import type { ModelOptionProvider, ModelOptionsResponse } from '../gatewayTypes.
import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowItems, windowOffset } from './overlayControls.js'
const VISIBLE = 12
const MIN_WIDTH = 40
const MAX_WIDTH = 90
const pageOffset = (count: number, sel: number) => Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), count - VISIBLE))
const visibleItems = (items: string[], sel: number) => {
const off = pageOffset(items.length, sel)
return { items: items.slice(off, off + VISIBLE), off }
}
export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPickerProps) {
const [providers, setProviders] = useState<ModelOptionProvider[]>([])
const [currentModel, setCurrentModel] = useState('')
@ -71,20 +65,20 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
const models = provider?.models ?? []
const names = useMemo(() => providerDisplayNames(providers), [providers])
useInput((ch, key) => {
if (key.escape) {
if (stage === 'model') {
setStage('provider')
setModelIdx(0)
return
}
onCancel()
const back = () => {
if (stage === 'model') {
setStage('provider')
setModelIdx(0)
return
}
onCancel()
}
useOverlayKeys({ onBack: back, onClose: onCancel })
useInput((ch, key) => {
const count = stage === 'provider' ? providers.length : models.length
const sel = stage === 'provider' ? providerIdx : modelIdx
const setSel = stage === 'provider' ? setProviderIdx : setModelIdx
@ -133,16 +127,16 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
const n = ch === '0' ? 10 : parseInt(ch, 10)
if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) {
const off = pageOffset(count, sel)
const offset = windowOffset(count, sel, VISIBLE)
if (stage === 'provider') {
const next = off + n - 1
const next = offset + n - 1
if (providers[next]) {
setProviderIdx(next)
}
} else if (provider && models[off + n - 1]) {
onSelect(`${models[off + n - 1]} --provider ${provider.slug}${persistGlobal ? ' --global' : ''}`)
} else if (provider && models[offset + n - 1]) {
onSelect(`${models[offset + n - 1]} --provider ${provider.slug}${persistGlobal ? ' --global' : ''}`)
}
}
})
@ -155,7 +149,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
return (
<Box flexDirection="column">
<Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
@ -164,7 +158,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
return (
<Box flexDirection="column">
<Text color={t.color.dim}>no authenticated providers</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
@ -174,7 +168,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
(p, i) => `${p.is_current ? '*' : ' '} ${names[i]} · ${p.total_models ?? p.models?.length ?? 0} models`
)
const { items, off } = visibleItems(rows, providerIdx)
const { items, offset } = windowItems(rows, providerIdx, VISIBLE)
return (
<Box flexDirection="column" width={width}>
@ -189,12 +183,12 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
{provider?.warning ? `warning: ${provider.warning}` : ' '}
</Text>
<Text color={t.color.dim} wrap="truncate-end">
{off > 0 ? `${off} more` : ' '}
{offset > 0 ? `${offset} more` : ' '}
</Text>
{Array.from({ length: VISIBLE }, (_, i) => {
const row = items[i]
const idx = off + i
const idx = offset + i
return row ? (
<Text
@ -215,20 +209,18 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
})}
<Text color={t.color.dim} wrap="truncate-end">
{off + VISIBLE < rows.length ? `${rows.length - off - VISIBLE} more` : ' '}
{offset + VISIBLE < rows.length ? `${rows.length - offset - VISIBLE} more` : ' '}
</Text>
<Text color={t.color.dim} wrap="truncate-end">
persist: {persistGlobal ? 'global' : 'session'} · g toggle
</Text>
<Text color={t.color.dim} wrap="truncate-end">
/ select · Enter choose · 1-9,0 quick · Esc cancel
</Text>
<OverlayHint t={t}>/ select · Enter choose · 1-9,0 quick · Esc/q cancel</OverlayHint>
</Box>
)
}
const { items, off } = visibleItems(models, modelIdx)
const { items, offset } = windowItems(models, modelIdx, VISIBLE)
return (
<Box flexDirection="column" width={width}>
@ -243,12 +235,12 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
{provider?.warning ? `warning: ${provider.warning}` : ' '}
</Text>
<Text color={t.color.dim} wrap="truncate-end">
{off > 0 ? `${off} more` : ' '}
{offset > 0 ? `${offset} more` : ' '}
</Text>
{Array.from({ length: VISIBLE }, (_, i) => {
const row = items[i]
const idx = off + i
const idx = offset + i
if (!row) {
return !models.length && i === 0 ? (
@ -277,15 +269,15 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
})}
<Text color={t.color.dim} wrap="truncate-end">
{off + VISIBLE < models.length ? `${models.length - off - VISIBLE} more` : ' '}
{offset + VISIBLE < models.length ? `${models.length - offset - VISIBLE} more` : ' '}
</Text>
<Text color={t.color.dim} wrap="truncate-end">
persist: {persistGlobal ? 'global' : 'session'} · g toggle
</Text>
<Text color={t.color.dim} wrap="truncate-end">
{models.length ? '↑/↓ select · Enter switch · 1-9,0 quick · Esc back' : 'Enter/Esc back'}
</Text>
<OverlayHint t={t}>
{models.length ? '↑/↓ select · Enter switch · 1-9,0 quick · Esc back · q close' : 'Enter/Esc back · q close'}
</OverlayHint>
</Box>
)
}

View file

@ -0,0 +1,50 @@
import { Text, useInput } from '@hermes/ink'
import type { Theme } from '../theme.js'
export function useOverlayKeys({ disabled = false, onBack, onClose }: OverlayKeysOptions) {
useInput((ch, key) => {
if (disabled) {
return
}
if (ch === 'q') {
return onClose()
}
if (key.escape) {
return onBack ? onBack() : onClose()
}
})
}
export function OverlayHint({ children, t }: OverlayHintProps) {
return (
<Text color={t.color.dim} wrap="truncate-end">
{children}
</Text>
)
}
export const windowOffset = (count: number, selected: number, visible: number) =>
Math.max(0, Math.min(selected - Math.floor(visible / 2), count - visible))
export function windowItems<T>(items: T[], selected: number, visible: number) {
const offset = windowOffset(items.length, selected, visible)
return {
items: items.slice(offset, offset + visible),
offset
}
}
interface OverlayHintProps {
children: string
t: Theme
}
interface OverlayKeysOptions {
disabled?: boolean
onBack?: () => void
onClose: () => void
}

View file

@ -6,6 +6,8 @@ import type { SessionListItem, SessionListResponse } from '../gatewayTypes.js'
import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowOffset } from './overlayControls.js'
const VISIBLE = 15
const MIN_WIDTH = 60
const MAX_WIDTH = 120
@ -33,6 +35,8 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
const { stdout } = useStdout()
const width = Math.max(MIN_WIDTH, Math.min(MAX_WIDTH, (stdout?.columns ?? 80) - 6))
useOverlayKeys({ onClose: onCancel })
useEffect(() => {
gw.request<SessionListResponse>('session.list', { limit: 20 })
.then(raw => {
@ -56,10 +60,6 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
}, [gw])
useInput((ch, key) => {
if (key.escape) {
return onCancel()
}
if (key.upArrow && sel > 0) {
setSel(s => s - 1)
}
@ -87,7 +87,7 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
return (
<Box flexDirection="column">
<Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
@ -96,12 +96,12 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
return (
<Box flexDirection="column">
<Text color={t.color.dim}>no previous sessions</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
const off = Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), items.length - VISIBLE))
const offset = windowOffset(items.length, sel, VISIBLE)
return (
<Box flexDirection="column" width={width}>
@ -109,10 +109,10 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
Resume Session
</Text>
{off > 0 && <Text color={t.color.dim}> {off} more</Text>}
{offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.slice(off, off + VISIBLE).map((s, vi) => {
const i = off + vi
{items.slice(offset, offset + VISIBLE).map((s, vi) => {
const i = offset + vi
const selected = sel === i
return (
@ -140,8 +140,8 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
)
})}
{off + VISIBLE < items.length && <Text color={t.color.dim}> {items.length - off - VISIBLE} more</Text>}
<Text color={t.color.dim}>/ select · Enter resume · 1-9 quick · Esc cancel</Text>
{offset + VISIBLE < items.length && <Text color={t.color.dim}> {items.length - offset - VISIBLE} more</Text>}
<OverlayHint t={t}>/ select · Enter resume · 1-9 quick · Esc/q cancel</OverlayHint>
</Box>
)
}

View file

@ -5,18 +5,12 @@ import type { GatewayClient } from '../gatewayClient.js'
import { rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowItems, windowOffset } from './overlayControls.js'
const VISIBLE = 12
const MIN_WIDTH = 40
const MAX_WIDTH = 90
const pageOffset = (count: number, sel: number) => Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), count - VISIBLE))
const visibleItems = (items: string[], sel: number) => {
const off = pageOffset(items.length, sel)
return { items: items.slice(off, off + VISIBLE), off }
}
export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const [skillsByCat, setSkillsByCat] = useState<Record<string, string[]>>({})
const [selectedCat, setSelectedCat] = useState('')
@ -48,6 +42,27 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const skills = selectedCat ? (skillsByCat[selectedCat] ?? []) : []
const skillName = skills[skillIdx] ?? ''
const back = () => {
if (stage === 'actions') {
setStage('skill')
setInfo(null)
setErr('')
return
}
if (stage === 'skill') {
setStage('category')
setSkillIdx(0)
return
}
onClose()
}
useOverlayKeys({ disabled: installing, onBack: back, onClose })
const inspect = (name: string) => {
setInfo(null)
setErr('')
@ -72,27 +87,6 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return
}
if (key.escape) {
if (stage === 'actions') {
setStage('skill')
setInfo(null)
setErr('')
return
}
if (stage === 'skill') {
setStage('category')
setSkillIdx(0)
return
}
onClose()
return
}
if (stage === 'actions') {
if (key.return) {
setStage('skill')
@ -159,8 +153,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const n = ch === '0' ? 10 : parseInt(ch, 10)
if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) {
const off = pageOffset(count, sel)
const next = off + n - 1
const next = windowOffset(count, sel, VISIBLE) + n - 1
if (stage === 'category') {
const cat = cats[next]
@ -193,7 +186,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return (
<Box flexDirection="column" width={width}>
<Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
@ -202,14 +195,14 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return (
<Box flexDirection="column" width={width}>
<Text color={t.color.dim}>no skills available</Text>
<Text color={t.color.dim}>Esc to cancel</Text>
<OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box>
)
}
if (stage === 'category') {
const rows = cats.map(c => `${c} · ${skillsByCat[c]?.length ?? 0} skills`)
const { items, off } = visibleItems(rows, catIdx)
const { items, offset } = windowItems(rows, catIdx, VISIBLE)
return (
<Box flexDirection="column" width={width}>
@ -218,10 +211,10 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
</Text>
<Text color={t.color.dim}>select a category</Text>
{off > 0 && <Text color={t.color.dim}> {off} more</Text>}
{offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.map((row, i) => {
const idx = off + i
const idx = offset + i
return (
<Text
@ -237,14 +230,14 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
)
})}
{off + VISIBLE < rows.length && <Text color={t.color.dim}> {rows.length - off - VISIBLE} more</Text>}
<Text color={t.color.dim}>/ select · Enter open · 1-9,0 quick · Esc cancel</Text>
{offset + VISIBLE < rows.length && <Text color={t.color.dim}> {rows.length - offset - VISIBLE} more</Text>}
<OverlayHint t={t}>/ select · Enter open · 1-9,0 quick · Esc/q cancel</OverlayHint>
</Box>
)
}
if (stage === 'skill') {
const { items, off } = visibleItems(skills, skillIdx)
const { items, offset } = windowItems(skills, skillIdx, VISIBLE)
return (
<Box flexDirection="column" width={width}>
@ -254,10 +247,10 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
<Text color={t.color.dim}>{skills.length} skill(s)</Text>
{!skills.length ? <Text color={t.color.dim}>no skills in this category</Text> : null}
{off > 0 && <Text color={t.color.dim}> {off} more</Text>}
{offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.map((row, i) => {
const idx = off + i
const idx = offset + i
return (
<Text
@ -273,10 +266,12 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
)
})}
{off + VISIBLE < skills.length && <Text color={t.color.dim}> {skills.length - off - VISIBLE} more</Text>}
<Text color={t.color.dim}>
{skills.length ? '↑/↓ select · Enter open · 1-9,0 quick · Esc back' : 'Esc back'}
</Text>
{offset + VISIBLE < skills.length && (
<Text color={t.color.dim}> {skills.length - offset - VISIBLE} more</Text>
)}
<OverlayHint t={t}>
{skills.length ? '↑/↓ select · Enter open · 1-9,0 quick · Esc back · q close' : 'Esc back · q close'}
</OverlayHint>
</Box>
)
}
@ -294,7 +289,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
{err ? <Text color={t.color.label}>error: {err}</Text> : null}
{installing ? <Text color={t.color.amber}>installing</Text> : null}
<Text color={t.color.dim}>i reinspect · x reinstall · Enter/Esc back</Text>
<OverlayHint t={t}>i reinspect · x reinstall · Enter/Esc back · q close</OverlayHint>
</Box>
)
}

View file

@ -1,5 +1,5 @@
import { Box, NoSelect, Text } from '@hermes/ink'
import { memo, useEffect, useMemo, useState, type ReactNode } from 'react'
import { memo, type ReactNode, useEffect, useMemo, useState } from 'react'
import spinners, { type BrailleSpinnerName } from 'unicode-animations'
import { THINKING_COT_MAX } from '../config/limits.js'
@ -919,13 +919,22 @@ export const ToolTrail = memo(function ToolTrail({
// hidden sections stay hidden so the override is honoured.
const expandAll = () => {
if (visible.thinking !== 'hidden') setOpenThinking(true)
if (visible.tools !== 'hidden') setOpenTools(true)
if (visible.thinking !== 'hidden') {
setOpenThinking(true)
}
if (visible.tools !== 'hidden') {
setOpenTools(true)
}
if (visible.subagents !== 'hidden') {
setOpenSubagents(true)
setDeepSubagents(true)
}
if (visible.activity !== 'hidden') setOpenMeta(true)
if (visible.activity !== 'hidden') {
setOpenMeta(true)
}
}
const metaTone: 'dim' | 'error' | 'warn' = activity.some(i => i.tone === 'error')

View file

@ -43,7 +43,5 @@ export const isAction = (key: { ctrl: boolean; meta: boolean; super?: boolean },
* accept Cmd+B (the platform action modifier) so existing macOS muscle memory
* keeps working.
*/
export const isVoiceToggleKey = (
key: { ctrl: boolean; meta: boolean; super?: boolean },
ch: string
): boolean => (key.ctrl || isActionMod(key)) && ch.toLowerCase() === 'b'
export const isVoiceToggleKey = (key: { ctrl: boolean; meta: boolean; super?: boolean }, ch: string): boolean =>
(key.ctrl || isActionMod(key)) && ch.toLowerCase() === 'b'