Merge pull request #15755 from NousResearch/bb/tui-model-flag

fix(tui): honor launch model overrides
This commit is contained in:
brooklyn! 2026-04-25 14:30:26 -05:00 committed by GitHub
commit 283c8fd6e2
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
30 changed files with 626 additions and 235 deletions

View file

@ -103,7 +103,8 @@ COMMAND_REGISTRY: list[CommandDef] = [
# Configuration # Configuration
CommandDef("config", "Show current configuration", "Configuration", CommandDef("config", "Show current configuration", "Configuration",
cli_only=True), cli_only=True),
CommandDef("model", "Switch model for this session", "Configuration", args_hint="[model] [--provider name] [--global]"), CommandDef("model", "Switch model for this session", "Configuration",
aliases=("provider",), args_hint="[model] [--provider name] [--global]"),
CommandDef("gquota", "Show Google Gemini Code Assist quota usage", "Info", CommandDef("gquota", "Show Google Gemini Code Assist quota usage", "Info",
cli_only=True), cli_only=True),

View file

@ -1028,7 +1028,12 @@ def _make_tui_argv(tui_dir: Path, tui_dev: bool) -> tuple[list[str], Path]:
return [node, str(root / "dist" / "entry.js")], root return [node, str(root / "dist" / "entry.js")], root
def _launch_tui(resume_session_id: Optional[str] = None, tui_dev: bool = False): def _launch_tui(
resume_session_id: Optional[str] = None,
tui_dev: bool = False,
model: Optional[str] = None,
provider: Optional[str] = None,
):
"""Replace current process with the TUI.""" """Replace current process with the TUI."""
tui_dir = PROJECT_ROOT / "ui-tui" tui_dir = PROJECT_ROOT / "ui-tui"
@ -1038,6 +1043,12 @@ def _launch_tui(resume_session_id: Optional[str] = None, tui_dev: bool = False):
) )
env.setdefault("HERMES_PYTHON", sys.executable) env.setdefault("HERMES_PYTHON", sys.executable)
env.setdefault("HERMES_CWD", os.getcwd()) env.setdefault("HERMES_CWD", os.getcwd())
if model:
env["HERMES_MODEL"] = model
env["HERMES_INFERENCE_MODEL"] = model
if provider:
env["HERMES_TUI_PROVIDER"] = provider
env["HERMES_INFERENCE_PROVIDER"] = provider
# Guarantee an 8GB V8 heap + exposed GC for the TUI. Default node cap is # Guarantee an 8GB V8 heap + exposed GC for the TUI. Default node cap is
# ~1.54GB depending on version and can fatal-OOM on long sessions with # ~1.54GB depending on version and can fatal-OOM on long sessions with
# large transcripts / reasoning blobs. Token-level merge: respect any # large transcripts / reasoning blobs. Token-level merge: respect any
@ -1176,6 +1187,8 @@ def cmd_chat(args):
_launch_tui( _launch_tui(
getattr(args, "resume", None), getattr(args, "resume", None),
tui_dev=getattr(args, "tui_dev", False), tui_dev=getattr(args, "tui_dev", False),
model=getattr(args, "model", None),
provider=getattr(args, "provider", None),
) )
# Import and run the CLI # Import and run the CLI
@ -6913,7 +6926,7 @@ For more help on a command:
default=None, default=None,
help=( help=(
"Model override for this invocation (e.g. anthropic/claude-sonnet-4.6). " "Model override for this invocation (e.g. anthropic/claude-sonnet-4.6). "
"Applies to -z/--oneshot. Also settable via HERMES_INFERENCE_MODEL env var." "Applies to -z/--oneshot and --tui. Also settable via HERMES_INFERENCE_MODEL env var."
), ),
) )
parser.add_argument( parser.add_argument(
@ -6921,7 +6934,7 @@ For more help on a command:
default=None, default=None,
help=( help=(
"Provider override for this invocation (e.g. openrouter, anthropic). " "Provider override for this invocation (e.g. openrouter, anthropic). "
"Applies to -z/--oneshot. Also settable via HERMES_INFERENCE_PROVIDER env var." "Applies to -z/--oneshot and --tui. Also settable via HERMES_INFERENCE_PROVIDER env var."
), ),
) )
parser.add_argument( parser.add_argument(

View file

@ -1379,27 +1379,93 @@ def curated_models_for_provider(
return [(m, "") for m in models] return [(m, "") for m in models]
def detect_provider_for_model( def _provider_keys(provider: str) -> set[str]:
key = (provider or "").strip().lower()
normalized = normalize_provider(provider)
return {k for k in (key, normalized) if k}
def _model_in_provider_catalog(name_lower: str, providers: set[str]) -> bool:
return any(
name_lower == model.lower()
for provider in providers
for model in _PROVIDER_MODELS.get(provider, [])
)
_AGGREGATOR_PROVIDERS = frozenset(
{"nous", "openrouter", "ai-gateway", "copilot", "kilocode"}
)
def _resolve_static_model_alias(
name_lower: str,
current_keys: set[str],
) -> Optional[tuple[str, str]]:
"""Resolve short aliases (e.g. sonnet/opus) using static catalogs only."""
try:
from hermes_cli.model_switch import MODEL_ALIASES
except Exception:
return None
identity = MODEL_ALIASES.get(name_lower)
if identity is None:
return None
vendor = identity.vendor
family = identity.family
def _match(provider: str) -> Optional[str]:
models = _PROVIDER_MODELS.get(provider, [])
if not models:
return None
prefix = (
f"{vendor}/{family}"
if provider in _AGGREGATOR_PROVIDERS
else family
).lower()
for model in models:
if model.lower().startswith(prefix):
return model
return None
for provider in current_keys:
if matched := _match(provider):
return provider, matched
for provider in _PROVIDER_MODELS:
if provider in current_keys or provider in _AGGREGATOR_PROVIDERS:
continue
if matched := _match(provider):
return provider, matched
for provider in _AGGREGATOR_PROVIDERS:
if provider in current_keys and (matched := _match(provider)):
return provider, matched
return None
def detect_static_provider_for_model(
model_name: str, model_name: str,
current_provider: str, current_provider: str,
) -> Optional[tuple[str, str]]: ) -> Optional[tuple[str, str]]:
"""Auto-detect the best provider for a model name. """Auto-detect a provider from static catalogs only.
Returns ``(provider_id, model_name)`` — the model name may be remapped Returns ``(provider_id, model_name)``. The model name may be remapped
(e.g. bare ``deepseek-chat`` → ``deepseek/deepseek-chat`` for OpenRouter). when a static alias or bare provider name resolves to a catalog default.
Returns ``None`` when no confident match is found. Returns ``None`` when no confident match is found.
Priority:
0. Bare provider name → switch to that provider's default model
1. Direct provider with credentials (highest)
2. Direct provider without credentials remap to OpenRouter slug
3. OpenRouter catalog match
""" """
name = (model_name or "").strip() name = (model_name or "").strip()
if not name: if not name:
return None return None
name_lower = name.lower() name_lower = name.lower()
current_keys = _provider_keys(current_provider)
alias_match = _resolve_static_model_alias(name_lower, current_keys)
if alias_match:
return alias_match
# --- Step 0: bare provider name typed as model --- # --- Step 0: bare provider name typed as model ---
# If someone types `/model nous` or `/model anthropic`, treat it as a # If someone types `/model nous` or `/model anthropic`, treat it as a
@ -1412,64 +1478,49 @@ def detect_provider_for_model(
if ( if (
resolved_provider in _PROVIDER_LABELS resolved_provider in _PROVIDER_LABELS
and default_models and default_models
and resolved_provider != normalize_provider(current_provider) and resolved_provider not in current_keys
): ):
return (resolved_provider, default_models[0]) return (resolved_provider, default_models[0])
# Aggregators list other providers' models — never auto-switch TO them # Aggregators list other providers' models — never auto-switch TO them
_AGGREGATORS = {"nous", "openrouter", "ai-gateway", "copilot", "kilocode"}
# If the model belongs to the current provider's catalog, don't suggest switching # If the model belongs to the current provider's catalog, don't suggest switching
current_models = _PROVIDER_MODELS.get(current_provider, []) if _model_in_provider_catalog(name_lower, current_keys):
if any(name_lower == m.lower() for m in current_models):
return None return None
# --- Step 1: check static provider catalogs for a direct match --- # --- Step 1: check static provider catalogs for a direct match ---
direct_match: Optional[str] = None
for pid, models in _PROVIDER_MODELS.items(): for pid, models in _PROVIDER_MODELS.items():
if pid == current_provider or pid in _AGGREGATORS: if pid in current_keys or pid in _AGGREGATOR_PROVIDERS:
continue continue
if any(name_lower == m.lower() for m in models): if any(name_lower == m.lower() for m in models):
direct_match = pid return (pid, name)
break
if direct_match: return None
# Check if we have credentials for this provider — env vars,
# credential pool, or auth store entries.
has_creds = False
try:
from hermes_cli.auth import PROVIDER_REGISTRY
pconfig = PROVIDER_REGISTRY.get(direct_match)
if pconfig:
for env_var in pconfig.api_key_env_vars:
if os.getenv(env_var, "").strip():
has_creds = True
break
except Exception:
pass
# Also check credential pool and auth store — covers OAuth,
# Claude Code tokens, and other non-env-var credentials (#10300).
if not has_creds:
try:
from agent.credential_pool import load_pool
pool = load_pool(direct_match)
if pool.has_credentials():
has_creds = True
except Exception:
pass
if not has_creds:
try:
from hermes_cli.auth import _load_auth_store
store = _load_auth_store()
if direct_match in store.get("providers", {}) or direct_match in store.get("credential_pool", {}):
has_creds = True
except Exception:
pass
# Always return the direct provider match. If credentials are
# missing, the client init will give a clear error rather than def detect_provider_for_model(
# silently routing through the wrong provider (#10300). model_name: str,
return (direct_match, name) current_provider: str,
) -> Optional[tuple[str, str]]:
"""Auto-detect the best provider for a model name.
Returns ``(provider_id, model_name)`` — the model name may be remapped
(e.g. bare ``deepseek-chat`` → ``deepseek/deepseek-chat`` for OpenRouter).
Returns ``None`` when no confident match is found.
Priority:
0. Bare provider name → switch to that provider's default model
1. Direct provider static catalog match
2. OpenRouter catalog match
"""
name = (model_name or "").strip()
if not name:
return None
static_match = detect_static_provider_for_model(name, current_provider)
if static_match:
return static_match
if _model_in_provider_catalog(name.lower(), _provider_keys(current_provider)):
return None
# --- Step 2: check OpenRouter catalog --- # --- Step 2: check OpenRouter catalog ---
# First try exact match (handles provider/model format) # First try exact match (handles provider/model format)

View file

@ -256,6 +256,17 @@ class TestDetectProviderForModel:
"""Models belonging to the current provider should not trigger a switch.""" """Models belonging to the current provider should not trigger a switch."""
assert detect_provider_for_model("gpt-5.3-codex", "openai-codex") is None assert detect_provider_for_model("gpt-5.3-codex", "openai-codex") is None
def test_short_alias_resolves_to_static_model(self):
"""Short aliases (e.g. sonnet) should resolve without network lookups."""
with patch(
"hermes_cli.models.fetch_openrouter_models",
side_effect=AssertionError("network lookup should not run"),
):
result = detect_provider_for_model("sonnet", "auto")
assert result is not None
assert result[0] == "anthropic"
assert result[1].startswith("claude-sonnet")
def test_openrouter_slug_match(self): def test_openrouter_slug_match(self):
"""Models in the OpenRouter catalog should be found.""" """Models in the OpenRouter catalog should be found."""
with patch("hermes_cli.models.fetch_openrouter_models", return_value=LIVE_OPENROUTER_MODELS): with patch("hermes_cli.models.fetch_openrouter_models", return_value=LIVE_OPENROUTER_MODELS):

View file

@ -1,4 +1,5 @@
from argparse import Namespace from argparse import Namespace
from pathlib import Path
import sys import sys
import types import types
@ -8,8 +9,11 @@ import pytest
def _args(**overrides): def _args(**overrides):
base = { base = {
"continue_last": None, "continue_last": None,
"model": None,
"provider": None,
"resume": None, "resume": None,
"tui": True, "tui": True,
"tui_dev": False,
} }
base.update(overrides) base.update(overrides)
return Namespace(**base) return Namespace(**base)
@ -31,7 +35,7 @@ def test_cmd_chat_tui_continue_uses_latest_tui_session(monkeypatch, main_mod):
calls.append(source) calls.append(source)
return "20260408_235959_a1b2c3" if source == "tui" else None return "20260408_235959_a1b2c3" if source == "tui" else None
def fake_launch(resume_session_id=None, tui_dev=False): def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id captured["resume"] = resume_session_id
raise SystemExit(0) raise SystemExit(0)
@ -58,7 +62,7 @@ def test_cmd_chat_tui_continue_falls_back_to_latest_cli_session(monkeypatch, mai
return "20260408_235959_d4e5f6" return "20260408_235959_d4e5f6"
return None return None
def fake_launch(resume_session_id=None, tui_dev=False): def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id captured["resume"] = resume_session_id
raise SystemExit(0) raise SystemExit(0)
@ -76,7 +80,7 @@ def test_cmd_chat_tui_continue_falls_back_to_latest_cli_session(monkeypatch, mai
def test_cmd_chat_tui_resume_resolves_title_before_launch(monkeypatch, main_mod): def test_cmd_chat_tui_resume_resolves_title_before_launch(monkeypatch, main_mod):
captured = {} captured = {}
def fake_launch(resume_session_id=None, tui_dev=False): def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured["resume"] = resume_session_id captured["resume"] = resume_session_id
raise SystemExit(0) raise SystemExit(0)
@ -89,6 +93,60 @@ def test_cmd_chat_tui_resume_resolves_title_before_launch(monkeypatch, main_mod)
assert captured["resume"] == "20260409_000000_aa11bb" assert captured["resume"] == "20260409_000000_aa11bb"
def test_cmd_chat_tui_passes_model_and_provider(monkeypatch, main_mod):
captured = {}
def fake_launch(resume_session_id=None, tui_dev=False, model=None, provider=None):
captured.update(
{
"model": model,
"provider": provider,
"resume": resume_session_id,
"tui_dev": tui_dev,
}
)
raise SystemExit(0)
monkeypatch.setattr(main_mod, "_launch_tui", fake_launch)
with pytest.raises(SystemExit):
main_mod.cmd_chat(
_args(model="anthropic/claude-sonnet-4.6", provider="anthropic")
)
assert captured == {
"model": "anthropic/claude-sonnet-4.6",
"provider": "anthropic",
"resume": None,
"tui_dev": False,
}
def test_launch_tui_exports_model_and_provider(monkeypatch, main_mod):
captured = {}
monkeypatch.setattr(
main_mod,
"_make_tui_argv",
lambda tui_dir, tui_dev: (["node", "dist/entry.js"], Path(".")),
)
def fake_call(argv, cwd=None, env=None):
captured.update({"argv": argv, "cwd": cwd, "env": env})
return 1
monkeypatch.setattr(main_mod.subprocess, "call", fake_call)
with pytest.raises(SystemExit):
main_mod._launch_tui(model="nous/hermes-test", provider="nous")
env = captured["env"]
assert env["HERMES_MODEL"] == "nous/hermes-test"
assert env["HERMES_INFERENCE_MODEL"] == "nous/hermes-test"
assert env["HERMES_TUI_PROVIDER"] == "nous"
assert env["HERMES_INFERENCE_PROVIDER"] == "nous"
def test_print_tui_exit_summary_includes_resume_and_token_totals(monkeypatch, capsys): def test_print_tui_exit_summary_includes_resume_and_token_totals(monkeypatch, capsys):
import hermes_cli.main as main_mod import hermes_cli.main as main_mod

View file

@ -83,6 +83,100 @@ def test_status_callback_accepts_single_message_argument():
) )
def test_resolve_model_uses_inference_model_env(monkeypatch):
monkeypatch.delenv("HERMES_MODEL", raising=False)
monkeypatch.setenv("HERMES_INFERENCE_MODEL", " anthropic/claude-sonnet-4.6\n")
assert server._resolve_model() == "anthropic/claude-sonnet-4.6"
def test_resolve_model_strips_config_model(monkeypatch):
monkeypatch.delenv("HERMES_MODEL", raising=False)
monkeypatch.delenv("HERMES_INFERENCE_MODEL", raising=False)
monkeypatch.setattr(
server, "_load_cfg", lambda: {"model": {"default": " nous/hermes-test "}}
)
assert server._resolve_model() == "nous/hermes-test"
def test_startup_runtime_uses_tui_provider_env(monkeypatch):
monkeypatch.setenv("HERMES_MODEL", "nous/hermes-test")
monkeypatch.setenv("HERMES_TUI_PROVIDER", "nous")
monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
assert server._resolve_startup_runtime() == ("nous/hermes-test", "nous")
def test_startup_runtime_does_not_treat_inference_provider_as_explicit(monkeypatch):
monkeypatch.setenv("HERMES_MODEL", "nous/hermes-test")
monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
monkeypatch.setenv("HERMES_INFERENCE_PROVIDER", "nous")
monkeypatch.setattr(
"hermes_cli.models.detect_static_provider_for_model",
lambda model, provider: None,
)
assert server._resolve_startup_runtime() == ("nous/hermes-test", None)
def test_startup_runtime_detects_provider_for_model_env(monkeypatch):
monkeypatch.setenv("HERMES_MODEL", "sonnet")
monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})
def fake_detect(model, current_provider):
assert model == "sonnet"
assert current_provider == "auto"
return "anthropic", "anthropic/claude-sonnet-4.6"
monkeypatch.setattr(
"hermes_cli.models.detect_static_provider_for_model", fake_detect
)
assert server._resolve_startup_runtime() == (
"anthropic/claude-sonnet-4.6",
"anthropic",
)
def test_startup_runtime_resolves_short_alias_without_network(monkeypatch):
monkeypatch.setenv("HERMES_MODEL", "sonnet")
monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})
monkeypatch.setattr(
"hermes_cli.models.fetch_openrouter_models",
lambda *_args, **_kwargs: (_ for _ in ()).throw(
AssertionError("network lookup should not run")
),
)
model, provider = server._resolve_startup_runtime()
assert provider == "anthropic"
assert model.startswith("claude-sonnet")
def test_startup_runtime_does_not_call_network_detector(monkeypatch):
monkeypatch.setenv("HERMES_MODEL", "sonnet")
monkeypatch.delenv("HERMES_TUI_PROVIDER", raising=False)
monkeypatch.delenv("HERMES_INFERENCE_PROVIDER", raising=False)
monkeypatch.setattr(server, "_load_cfg", lambda: {"model": {"provider": "auto"}})
monkeypatch.setattr(
"hermes_cli.models.detect_provider_for_model",
lambda *_args, **_kwargs: (_ for _ in ()).throw(
AssertionError("network detector called")
),
)
model, provider = server._resolve_startup_runtime()
assert model
assert provider in {None, "anthropic"}
def _session(agent=None, **extra): def _session(agent=None, **extra):
return { return {
"agent": agent if agent is not None else types.SimpleNamespace(), "agent": agent if agent is not None else types.SimpleNamespace(),
@ -245,6 +339,14 @@ def test_setup_status_reports_provider_config(monkeypatch):
assert resp["result"]["provider_configured"] is False assert resp["result"]["provider_configured"] is False
def test_complete_slash_includes_provider_alias():
resp = server.handle_request(
{"id": "1", "method": "complete.slash", "params": {"text": "/pro"}}
)
assert any(item["text"] == "provider" for item in resp["result"]["items"])
def test_config_set_reasoning_updates_live_session_and_agent(tmp_path, monkeypatch): def test_config_set_reasoning_updates_live_session_and_agent(tmp_path, monkeypatch):
monkeypatch.setattr(server, "_hermes_home", tmp_path) monkeypatch.setattr(server, "_hermes_home", tmp_path)
agent = types.SimpleNamespace(reasoning_config=None) agent = types.SimpleNamespace(reasoning_config=None)
@ -415,6 +517,57 @@ def test_config_set_model_syncs_inference_provider_env(monkeypatch):
assert os.environ["HERMES_INFERENCE_PROVIDER"] == "anthropic" assert os.environ["HERMES_INFERENCE_PROVIDER"] == "anthropic"
def test_config_set_model_syncs_tui_provider_env(monkeypatch):
class Agent:
model = "gpt-5.3-codex"
provider = "openai-codex"
base_url = ""
api_key = ""
def switch_model(self, **kwargs):
self.model = kwargs["new_model"]
self.provider = kwargs["new_provider"]
agent = Agent()
server._sessions["sid"] = _session(agent=agent)
monkeypatch.setenv("HERMES_TUI_PROVIDER", "openai-codex")
monkeypatch.setattr(server, "_restart_slash_worker", lambda session: None)
monkeypatch.setattr(server, "_emit", lambda *args, **kwargs: None)
def fake_switch_model(**kwargs):
return types.SimpleNamespace(
success=True,
new_model="anthropic/claude-sonnet-4.6",
target_provider="anthropic",
api_key="key",
base_url="https://api.anthropic.com",
api_mode="anthropic_messages",
warning_message="",
)
monkeypatch.setattr("hermes_cli.model_switch.switch_model", fake_switch_model)
try:
resp = server.handle_request(
{
"id": "1",
"method": "config.set",
"params": {
"session_id": "sid",
"key": "model",
"value": "anthropic/claude-sonnet-4.6 --provider anthropic",
},
}
)
assert resp["result"]["value"] == "anthropic/claude-sonnet-4.6"
assert os.environ["HERMES_TUI_PROVIDER"] == "anthropic"
assert os.environ["HERMES_MODEL"] == "anthropic/claude-sonnet-4.6"
assert os.environ["HERMES_INFERENCE_MODEL"] == "anthropic/claude-sonnet-4.6"
finally:
server._sessions.clear()
def test_config_set_personality_rejects_unknown_name(monkeypatch): def test_config_set_personality_rejects_unknown_name(monkeypatch):
monkeypatch.setattr( monkeypatch.setattr(
server, server,

View file

@ -560,17 +560,55 @@ def resolve_skin() -> dict:
def _resolve_model() -> str: def _resolve_model() -> str:
env = os.environ.get("HERMES_MODEL", "") env = (
os.environ.get("HERMES_MODEL", "")
or os.environ.get("HERMES_INFERENCE_MODEL", "")
).strip()
if env: if env:
return env return env
m = _load_cfg().get("model", "") m = _load_cfg().get("model", "")
if isinstance(m, dict): if isinstance(m, dict):
return m.get("default", "") return str(m.get("default", "") or "").strip()
if isinstance(m, str) and m: if isinstance(m, str) and m:
return m return m.strip()
return "anthropic/claude-sonnet-4" return "anthropic/claude-sonnet-4"
def _resolve_startup_runtime() -> tuple[str, str | None]:
model = _resolve_model()
explicit_provider = os.environ.get("HERMES_TUI_PROVIDER", "").strip()
if explicit_provider:
return model, explicit_provider
explicit_model = (
os.environ.get("HERMES_MODEL", "")
or os.environ.get("HERMES_INFERENCE_MODEL", "")
).strip()
if not explicit_model:
return model, None
try:
from hermes_cli.models import detect_static_provider_for_model
cfg = _load_cfg().get("model") or {}
current_provider = (
(
str(cfg.get("provider") or "").strip().lower()
if isinstance(cfg, dict)
else ""
)
or os.environ.get("HERMES_INFERENCE_PROVIDER", "").strip().lower()
or "auto"
)
detected = detect_static_provider_for_model(explicit_model, current_provider)
if detected:
provider, detected_model = detected
return detected_model, provider
except Exception:
pass
return model, None
def _write_config_key(key_path: str, value): def _write_config_key(key_path: str, value):
cfg = _load_cfg() cfg = _load_cfg()
current = cfg current = cfg
@ -736,12 +774,15 @@ def _apply_model_switch(sid: str, session: dict, raw_input: str) -> dict:
_emit("session.info", sid, _session_info(agent)) _emit("session.info", sid, _session_info(agent))
os.environ["HERMES_MODEL"] = result.new_model os.environ["HERMES_MODEL"] = result.new_model
os.environ["HERMES_INFERENCE_MODEL"] = result.new_model
# Keep the process-level provider env var in sync with the user's explicit # Keep the process-level provider env var in sync with the user's explicit
# choice so any ambient re-resolution (credential pool refresh, compressor # choice so any ambient re-resolution (credential pool refresh, compressor
# rebuild, aux clients) resolves to the new provider instead of the # rebuild, aux clients) resolves to the new provider instead of the
# original one persisted in config or env. # original one persisted in config or env.
if result.target_provider: if result.target_provider:
os.environ["HERMES_INFERENCE_PROVIDER"] = result.target_provider os.environ["HERMES_INFERENCE_PROVIDER"] = result.target_provider
if os.environ.get("HERMES_TUI_PROVIDER"):
os.environ["HERMES_TUI_PROVIDER"] = result.target_provider
if persist_global: if persist_global:
_persist_model_switch(result) _persist_model_switch(result)
return {"value": result.new_model, "warning": result.warning_message or ""} return {"value": result.new_model, "warning": result.warning_message or ""}
@ -1277,9 +1318,13 @@ def _make_agent(sid: str, key: str, session_id: str | None = None):
cfg = _load_cfg() cfg = _load_cfg()
system_prompt = ((cfg.get("agent") or {}).get("system_prompt", "") or "").strip() system_prompt = ((cfg.get("agent") or {}).get("system_prompt", "") or "").strip()
runtime = resolve_runtime_provider(requested=None) model, requested_provider = _resolve_startup_runtime()
runtime = resolve_runtime_provider(
requested=requested_provider,
target_model=model or None,
)
return AIAgent( return AIAgent(
model=_resolve_model(), model=model,
provider=runtime.get("provider"), provider=runtime.get("provider"),
base_url=runtime.get("base_url"), base_url=runtime.get("base_url"),
api_key=runtime.get("api_key"), api_key=runtime.get("api_key"),

View file

@ -53,7 +53,11 @@ export function AlternateScreen(t0: Props) {
} }
writeRaw( writeRaw(
ENTER_ALT_SCREEN + ERASE_SCROLLBACK + ERASE_SCREEN + CURSOR_HOME + (mouseTracking ? ENABLE_MOUSE_TRACKING : DISABLE_MOUSE_TRACKING) ENTER_ALT_SCREEN +
ERASE_SCROLLBACK +
ERASE_SCREEN +
CURSOR_HOME +
(mouseTracking ? ENABLE_MOUSE_TRACKING : DISABLE_MOUSE_TRACKING)
) )
ink?.setAltScreenActive(true, mouseTracking) ink?.setAltScreenActive(true, mouseTracking)

View file

@ -323,27 +323,39 @@ const measureTextNode = function (
widthMode: LayoutMeasureMode widthMode: LayoutMeasureMode
): { width: number; height: number } { ): { width: number; height: number } {
const elem = node.nodeName !== '#text' ? (node as DOMElement) : node.parentNode const elem = node.nodeName !== '#text' ? (node as DOMElement) : node.parentNode
if (elem && elem.nodeName === 'ink-text') { if (elem && elem.nodeName === 'ink-text') {
let cache = elem._textMeasureCache let cache = elem._textMeasureCache
if (!cache) { if (!cache) {
cache = { gen: 0, entries: new Map() } cache = { gen: 0, entries: new Map() }
elem._textMeasureCache = cache elem._textMeasureCache = cache
} }
const key = `${width}|${widthMode}` const key = `${width}|${widthMode}`
const hit = cache.entries.get(key) const hit = cache.entries.get(key)
if (hit && hit._gen === cache.gen) { if (hit && hit._gen === cache.gen) {
return hit.result return hit.result
} }
const result = computeTextMeasure(node, width, widthMode) const result = computeTextMeasure(node, width, widthMode)
// Enforce cap with FIFO eviction to avoid unbounded growth during // Enforce cap with FIFO eviction to avoid unbounded growth during
// pathological frames where yoga probes many widths. // pathological frames where yoga probes many widths.
if (cache.entries.size >= MEASURE_CACHE_CAP) { if (cache.entries.size >= MEASURE_CACHE_CAP) {
const firstKey = cache.entries.keys().next().value const firstKey = cache.entries.keys().next().value
cache.entries.delete(firstKey)
if (firstKey !== undefined) {
cache.entries.delete(firstKey)
}
} }
cache.entries.set(key, { _gen: cache.gen, result }) cache.entries.set(key, { _gen: cache.gen, result })
return result return result
} }
return computeTextMeasure(node, width, widthMode) return computeTextMeasure(node, width, widthMode)
} }
@ -475,6 +487,7 @@ export const clearYogaNodeReferences = (node: DOMElement | TextNode): void => {
for (const child of node.childNodes) { for (const child of node.childNodes) {
clearYogaNodeReferences(child) clearYogaNodeReferences(child)
} }
node._textMeasureCache = undefined node._textMeasureCache = undefined
} }

View file

@ -9,18 +9,21 @@ describe('shouldEmitClipboardSequence', () => {
}) })
it('keeps OSC enabled for remote or plain local terminals', () => { it('keeps OSC enabled for remote or plain local terminals', () => {
expect(shouldEmitClipboardSequence({ SSH_CONNECTION: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)).toBe( expect(
true shouldEmitClipboardSequence({ SSH_CONNECTION: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)
) ).toBe(true)
expect(shouldEmitClipboardSequence({ TERM: 'xterm-256color' } as NodeJS.ProcessEnv)).toBe(true) expect(shouldEmitClipboardSequence({ TERM: 'xterm-256color' } as NodeJS.ProcessEnv)).toBe(true)
}) })
it('honors explicit env override', () => { it('honors explicit env override', () => {
expect(shouldEmitClipboardSequence({ HERMES_TUI_CLIPBOARD_OSC52: '1', TMUX: '/tmp/tmux-1/default,1,0' } as NodeJS.ProcessEnv)).toBe( expect(
true shouldEmitClipboardSequence({
) HERMES_TUI_CLIPBOARD_OSC52: '1',
expect(shouldEmitClipboardSequence({ HERMES_TUI_COPY_OSC52: '0', TERM: 'xterm-256color' } as NodeJS.ProcessEnv)).toBe( TMUX: '/tmp/tmux-1/default,1,0'
false } as NodeJS.ProcessEnv)
) ).toBe(true)
expect(
shouldEmitClipboardSequence({ HERMES_TUI_COPY_OSC52: '0', TERM: 'xterm-256color' } as NodeJS.ProcessEnv)
).toBe(false)
}) })
}) })

View file

@ -226,7 +226,10 @@ describe('createGatewayEventHandler', () => {
const inlineDiff = '--- a/foo.ts\n+++ b/foo.ts\n@@\n-old\n+new' const inlineDiff = '--- a/foo.ts\n+++ b/foo.ts\n@@\n-old\n+new'
const assistantText = 'Done. Clean swap:\n\n```diff\n-old\n+new\n```' const assistantText = 'Done. Clean swap:\n\n```diff\n-old\n+new\n```'
onEvent({ payload: { inline_diff: inlineDiff, summary: 'patched', tool_id: 'tool-1' }, type: 'tool.complete' } as any) onEvent({
payload: { inline_diff: inlineDiff, summary: 'patched', tool_id: 'tool-1' },
type: 'tool.complete'
} as any)
onEvent({ payload: { text: assistantText }, type: 'message.complete' } as any) onEvent({ payload: { text: assistantText }, type: 'message.complete' } as any)
expect(appended).toHaveLength(1) expect(appended).toHaveLength(1)

View file

@ -17,6 +17,14 @@ describe('createSlashHandler', () => {
expect(getOverlayState().picker).toBe(true) expect(getOverlayState().picker).toBe(true)
}) })
it('treats /provider as a local /model alias', () => {
const ctx = buildCtx()
expect(createSlashHandler(ctx)('/provider')).toBe(true)
expect(getOverlayState().modelPicker).toBe(true)
expect(ctx.gateway.gw.request).not.toHaveBeenCalled()
})
it('opens the skills hub locally for bare /skills', () => { it('opens the skills hub locally for bare /skills', () => {
const ctx = buildCtx() const ctx = buildCtx()
@ -118,9 +126,7 @@ describe('createSlashHandler', () => {
const ctx = buildCtx() const ctx = buildCtx()
createSlashHandler(ctx)('/details tools blink') createSlashHandler(ctx)('/details tools blink')
expect(getUiState().sections.tools).toBeUndefined() expect(getUiState().sections.tools).toBeUndefined()
expect(ctx.transcript.sys).toHaveBeenCalledWith( expect(ctx.transcript.sys).toHaveBeenCalledWith('usage: /details <section> [hidden|collapsed|expanded|reset]')
'usage: /details <section> [hidden|collapsed|expanded|reset]'
)
}) })
it('shows tool enable usage when names are missing', () => { it('shows tool enable usage when names are missing', () => {

View file

@ -1,6 +1,6 @@
import { describe, expect, it } from 'vitest' import { describe, expect, it } from 'vitest'
import { isSectionName, parseDetailsMode, resolveSections, sectionMode, SECTION_NAMES } from '../domain/details.js' import { isSectionName, parseDetailsMode, resolveSections, SECTION_NAMES, sectionMode } from '../domain/details.js'
describe('parseDetailsMode', () => { describe('parseDetailsMode', () => {
it('accepts the canonical modes case-insensitively', () => { it('accepts the canonical modes case-insensitively', () => {

View file

@ -1,8 +1,8 @@
import { useStore } from '@nanostores/react' import { useStore } from '@nanostores/react'
import { GatewayProvider } from './app/gatewayContext.js' import { GatewayProvider } from './app/gatewayContext.js'
import { useMainApp } from './app/useMainApp.js'
import { $uiState } from './app/uiStore.js' import { $uiState } from './app/uiStore.js'
import { useMainApp } from './app/useMainApp.js'
import { AppLayout } from './components/appLayout.js' import { AppLayout } from './components/appLayout.js'
import type { GatewayClient } from './gatewayClient.js' import type { GatewayClient } from './gatewayClient.js'

View file

@ -1,7 +1,7 @@
import { NO_CONFIRM_DESTRUCTIVE } from '../../../config/env.js' import { NO_CONFIRM_DESTRUCTIVE } from '../../../config/env.js'
import { dailyFortune, randomFortune } from '../../../content/fortunes.js' import { dailyFortune, randomFortune } from '../../../content/fortunes.js'
import { HOTKEYS } from '../../../content/hotkeys.js' import { HOTKEYS } from '../../../content/hotkeys.js'
import { SECTION_NAMES, isSectionName, nextDetailsMode, parseDetailsMode } from '../../../domain/details.js' import { isSectionName, nextDetailsMode, parseDetailsMode, SECTION_NAMES } from '../../../domain/details.js'
import type { import type {
ConfigGetValueResponse, ConfigGetValueResponse,
ConfigSetResponse, ConfigSetResponse,
@ -40,8 +40,10 @@ const flagFromArg = (arg: string, current: boolean): boolean | null => {
const RESET_WORDS = new Set(['reset', 'clear', 'default']) const RESET_WORDS = new Set(['reset', 'clear', 'default'])
const CYCLE_WORDS = new Set(['cycle', 'toggle']) const CYCLE_WORDS = new Set(['cycle', 'toggle'])
const DETAILS_USAGE = const DETAILS_USAGE =
'usage: /details [hidden|collapsed|expanded|cycle] or /details <section> [hidden|collapsed|expanded|reset]' 'usage: /details [hidden|collapsed|expanded|cycle] or /details <section> [hidden|collapsed|expanded|reset]'
const DETAILS_SECTION_USAGE = 'usage: /details <section> [hidden|collapsed|expanded|reset]' const DETAILS_SECTION_USAGE = 'usage: /details <section> [hidden|collapsed|expanded|reset]'
export const coreCommands: SlashCommand[] = [ export const coreCommands: SlashCommand[] = [
@ -97,9 +99,7 @@ export const coreCommands: SlashCommand[] = [
} }
patchUiState({ mouseTracking: next }) patchUiState({ mouseTracking: next })
ctx.gateway ctx.gateway.rpc<ConfigSetResponse>('config.set', { key: 'mouse', value: next ? 'on' : 'off' }).catch(() => {})
.rpc<ConfigSetResponse>('config.set', { key: 'mouse', value: next ? 'on' : 'off' })
.catch(() => {})
queueMicrotask(() => ctx.transcript.sys(`mouse tracking ${next ? 'on' : 'off'}`)) queueMicrotask(() => ctx.transcript.sys(`mouse tracking ${next ? 'on' : 'off'}`))
} }
@ -178,7 +178,9 @@ export const coreCommands: SlashCommand[] = [
gateway gateway
.rpc<ConfigGetValueResponse>('config.get', { key: 'details_mode' }) .rpc<ConfigGetValueResponse>('config.get', { key: 'details_mode' })
.then(r => { .then(r => {
if (ctx.stale()) return if (ctx.stale()) {
return
}
const mode = parseDetailsMode(r?.value) ?? ui.detailsMode const mode = parseDetailsMode(r?.value) ?? ui.detailsMode
patchUiState({ detailsMode: mode }) patchUiState({ detailsMode: mode })

View file

@ -58,6 +58,7 @@ export const sessionCommands: SlashCommand[] = [
{ {
help: 'change or show model', help: 'change or show model',
aliases: ['provider'],
name: 'model', name: 'model',
run: (arg, ctx) => { run: (arg, ctx) => {
if (ctx.session.guardBusySessionSwitch('change models')) { if (ctx.session.guardBusySessionSwitch('change models')) {

View file

@ -5,18 +5,6 @@ import { runExternalSetup } from '../../setupHandoff.js'
import type { SlashCommand } from '../types.js' import type { SlashCommand } from '../types.js'
export const setupCommands: SlashCommand[] = [ export const setupCommands: SlashCommand[] = [
{
help: 'configure LLM provider + model (launches `hermes model`)',
name: 'provider',
run: (_arg, ctx) =>
void runExternalSetup({
args: ['model'],
ctx,
done: 'provider updated — starting session…',
launcher: launchHermesCommand,
suspend: withInkSuspended
})
},
{ {
help: 'run full setup wizard (launches `hermes setup`)', help: 'run full setup wizard (launches `hermes setup`)',
name: 'setup', name: 'setup',

View file

@ -300,6 +300,7 @@ class TurnController {
const hasDiffSegment = segments.some(msg => msg.kind === 'diff') const hasDiffSegment = segments.some(msg => msg.kind === 'diff')
const detailsBelongBeforeDiff = hasDiffSegment && (tools.length > 0 || Boolean(savedReasoning)) const detailsBelongBeforeDiff = hasDiffSegment && (tools.length > 0 || Boolean(savedReasoning))
const finalMessages = detailsBelongBeforeDiff const finalMessages = detailsBelongBeforeDiff
? insertBeforeFirstDiff(segments, { ? insertBeforeFirstDiff(segments, {
kind: 'trail', kind: 'trail',

View file

@ -1,8 +1,8 @@
import { atom } from 'nanostores' import { atom } from 'nanostores'
import { MOUSE_TRACKING } from '../config/env.js'
import { ZERO } from '../domain/usage.js' import { ZERO } from '../domain/usage.js'
import { DEFAULT_THEME } from '../theme.js' import { DEFAULT_THEME } from '../theme.js'
import { MOUSE_TRACKING } from '../config/env.js'
import type { UiState } from './interfaces.js' import type { UiState } from './interfaces.js'

View file

@ -159,16 +159,14 @@ export function useInputHandlers(ctx: InputHandlerContext): InputHandlerResult {
voice.setProcessing(false) voice.setProcessing(false)
} }
gateway gateway.rpc<VoiceRecordResponse>('voice.record', { action }).catch((e: Error) => {
.rpc<VoiceRecordResponse>('voice.record', { action }) // Revert optimistic UI on failure.
.catch((e: Error) => { if (starting) {
// Revert optimistic UI on failure. voice.setRecording(false)
if (starting) { }
voice.setRecording(false)
}
actions.sys(`voice error: ${e.message}`) actions.sys(`voice error: ${e.message}`)
}) })
} }
useInput((ch, key) => { useInput((ch, key) => {

View file

@ -640,14 +640,14 @@ export function useMainApp(gw: GatewayClient) {
const showProgressArea = anyPanelVisible const showProgressArea = anyPanelVisible
? Boolean( ? Boolean(
ui.busy || ui.busy ||
turn.outcome || turn.outcome ||
turn.streamPendingTools.length || turn.streamPendingTools.length ||
turn.streamSegments.length || turn.streamSegments.length ||
turn.subagents.length || turn.subagents.length ||
turn.tools.length || turn.tools.length ||
turn.turnTrail.length || turn.turnTrail.length ||
hasReasoning || hasReasoning ||
turn.activity.length turn.activity.length
) )
: turn.activity.some(item => item.tone !== 'info') : turn.activity.some(item => item.tone !== 'info')

View file

@ -218,11 +218,7 @@ export function StatusRule({
{voiceLabel ? ( {voiceLabel ? (
<Text <Text
color={ color={
voiceLabel.startsWith('●') voiceLabel.startsWith('●') ? t.color.error : voiceLabel.startsWith('◉') ? t.color.warn : t.color.dim
? t.color.error
: voiceLabel.startsWith('◉')
? t.color.warn
: t.color.dim
} }
> >
{' │ '} {' │ '}

View file

@ -9,6 +9,7 @@ import { $uiState } from '../app/uiStore.js'
import { FloatBox } from './appChrome.js' import { FloatBox } from './appChrome.js'
import { MaskedPrompt } from './maskedPrompt.js' import { MaskedPrompt } from './maskedPrompt.js'
import { ModelPicker } from './modelPicker.js' import { ModelPicker } from './modelPicker.js'
import { OverlayHint } from './overlayControls.js'
import { ApprovalPrompt, ClarifyPrompt, ConfirmPrompt } from './prompts.js' import { ApprovalPrompt, ClarifyPrompt, ConfirmPrompt } from './prompts.js'
import { SessionPicker } from './sessionPicker.js' import { SessionPicker } from './sessionPicker.js'
import { SkillsHub } from './skillsHub.js' import { SkillsHub } from './skillsHub.js'
@ -162,11 +163,11 @@ export function FloatingOverlays({
))} ))}
<Box marginTop={1}> <Box marginTop={1}>
<Text color={ui.theme.color.dim}> <OverlayHint t={ui.theme}>
{overlay.pager.offset + pagerPageSize < overlay.pager.lines.length {overlay.pager.offset + pagerPageSize < overlay.pager.lines.length
? `↑↓/jk line · Enter/Space/PgDn page · b/PgUp back · g/G top/bottom · q close (${Math.min(overlay.pager.offset + pagerPageSize, overlay.pager.lines.length)}/${overlay.pager.lines.length})` ? `↑↓/jk line · Enter/Space/PgDn page · b/PgUp back · g/G top/bottom · Esc/q close (${Math.min(overlay.pager.offset + pagerPageSize, overlay.pager.lines.length)}/${overlay.pager.lines.length})`
: `end · ↑↓/jk · b/PgUp back · g top · q close (${overlay.pager.lines.length} lines)`} : `end · ↑↓/jk · b/PgUp back · g top · Esc/q close (${overlay.pager.lines.length} lines)`}
</Text> </OverlayHint>
</Box> </Box>
</Box> </Box>
</FloatBox> </FloatBox>

View file

@ -1,8 +1,8 @@
import { Ansi, Box, NoSelect, Text } from '@hermes/ink' import { Ansi, Box, NoSelect, Text } from '@hermes/ink'
import { memo } from 'react' import { memo } from 'react'
import { sectionMode } from '../domain/details.js'
import { LONG_MSG } from '../config/limits.js' import { LONG_MSG } from '../config/limits.js'
import { sectionMode } from '../domain/details.js'
import { userDisplay } from '../domain/messages.js' import { userDisplay } from '../domain/messages.js'
import { ROLE } from '../domain/roles.js' import { ROLE } from '../domain/roles.js'
import { compactPreview, hasAnsi, isPasteBackedText, stripAnsi } from '../lib/text.js' import { compactPreview, hasAnsi, isPasteBackedText, stripAnsi } from '../lib/text.js'
@ -72,8 +72,7 @@ export const MessageLine = memo(function MessageLine({
const { body, glyph, prefix } = ROLE[msg.role](t) const { body, glyph, prefix } = ROLE[msg.role](t)
const showDetails = const showDetails =
(toolsMode !== 'hidden' && Boolean(msg.tools?.length)) || (toolsMode !== 'hidden' && Boolean(msg.tools?.length)) || (thinkingMode !== 'hidden' && Boolean(thinking))
(thinkingMode !== 'hidden' && Boolean(thinking))
const content = (() => { const content = (() => {
if (msg.kind === 'slash') { if (msg.kind === 'slash') {

View file

@ -7,18 +7,12 @@ import type { ModelOptionProvider, ModelOptionsResponse } from '../gatewayTypes.
import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js' import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js' import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowItems, windowOffset } from './overlayControls.js'
const VISIBLE = 12 const VISIBLE = 12
const MIN_WIDTH = 40 const MIN_WIDTH = 40
const MAX_WIDTH = 90 const MAX_WIDTH = 90
const pageOffset = (count: number, sel: number) => Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), count - VISIBLE))
const visibleItems = (items: string[], sel: number) => {
const off = pageOffset(items.length, sel)
return { items: items.slice(off, off + VISIBLE), off }
}
export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPickerProps) { export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPickerProps) {
const [providers, setProviders] = useState<ModelOptionProvider[]>([]) const [providers, setProviders] = useState<ModelOptionProvider[]>([])
const [currentModel, setCurrentModel] = useState('') const [currentModel, setCurrentModel] = useState('')
@ -71,20 +65,20 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
const models = provider?.models ?? [] const models = provider?.models ?? []
const names = useMemo(() => providerDisplayNames(providers), [providers]) const names = useMemo(() => providerDisplayNames(providers), [providers])
useInput((ch, key) => { const back = () => {
if (key.escape) { if (stage === 'model') {
if (stage === 'model') { setStage('provider')
setStage('provider') setModelIdx(0)
setModelIdx(0)
return
}
onCancel()
return return
} }
onCancel()
}
useOverlayKeys({ onBack: back, onClose: onCancel })
useInput((ch, key) => {
const count = stage === 'provider' ? providers.length : models.length const count = stage === 'provider' ? providers.length : models.length
const sel = stage === 'provider' ? providerIdx : modelIdx const sel = stage === 'provider' ? providerIdx : modelIdx
const setSel = stage === 'provider' ? setProviderIdx : setModelIdx const setSel = stage === 'provider' ? setProviderIdx : setModelIdx
@ -133,16 +127,16 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
const n = ch === '0' ? 10 : parseInt(ch, 10) const n = ch === '0' ? 10 : parseInt(ch, 10)
if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) { if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) {
const off = pageOffset(count, sel) const offset = windowOffset(count, sel, VISIBLE)
if (stage === 'provider') { if (stage === 'provider') {
const next = off + n - 1 const next = offset + n - 1
if (providers[next]) { if (providers[next]) {
setProviderIdx(next) setProviderIdx(next)
} }
} else if (provider && models[off + n - 1]) { } else if (provider && models[offset + n - 1]) {
onSelect(`${models[off + n - 1]} --provider ${provider.slug}${persistGlobal ? ' --global' : ''}`) onSelect(`${models[offset + n - 1]} --provider ${provider.slug}${persistGlobal ? ' --global' : ''}`)
} }
} }
}) })
@ -155,7 +149,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
return ( return (
<Box flexDirection="column"> <Box flexDirection="column">
<Text color={t.color.label}>error: {err}</Text> <Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
@ -164,7 +158,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
return ( return (
<Box flexDirection="column"> <Box flexDirection="column">
<Text color={t.color.dim}>no authenticated providers</Text> <Text color={t.color.dim}>no authenticated providers</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
@ -174,7 +168,7 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
(p, i) => `${p.is_current ? '*' : ' '} ${names[i]} · ${p.total_models ?? p.models?.length ?? 0} models` (p, i) => `${p.is_current ? '*' : ' '} ${names[i]} · ${p.total_models ?? p.models?.length ?? 0} models`
) )
const { items, off } = visibleItems(rows, providerIdx) const { items, offset } = windowItems(rows, providerIdx, VISIBLE)
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
@ -189,12 +183,12 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
{provider?.warning ? `warning: ${provider.warning}` : ' '} {provider?.warning ? `warning: ${provider.warning}` : ' '}
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
{off > 0 ? `${off} more` : ' '} {offset > 0 ? `${offset} more` : ' '}
</Text> </Text>
{Array.from({ length: VISIBLE }, (_, i) => { {Array.from({ length: VISIBLE }, (_, i) => {
const row = items[i] const row = items[i]
const idx = off + i const idx = offset + i
return row ? ( return row ? (
<Text <Text
@ -215,20 +209,18 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
})} })}
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
{off + VISIBLE < rows.length ? `${rows.length - off - VISIBLE} more` : ' '} {offset + VISIBLE < rows.length ? `${rows.length - offset - VISIBLE} more` : ' '}
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
persist: {persistGlobal ? 'global' : 'session'} · g toggle persist: {persistGlobal ? 'global' : 'session'} · g toggle
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <OverlayHint t={t}>/ select · Enter choose · 1-9,0 quick · Esc/q cancel</OverlayHint>
/ select · Enter choose · 1-9,0 quick · Esc cancel
</Text>
</Box> </Box>
) )
} }
const { items, off } = visibleItems(models, modelIdx) const { items, offset } = windowItems(models, modelIdx, VISIBLE)
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
@ -243,12 +235,12 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
{provider?.warning ? `warning: ${provider.warning}` : ' '} {provider?.warning ? `warning: ${provider.warning}` : ' '}
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
{off > 0 ? `${off} more` : ' '} {offset > 0 ? `${offset} more` : ' '}
</Text> </Text>
{Array.from({ length: VISIBLE }, (_, i) => { {Array.from({ length: VISIBLE }, (_, i) => {
const row = items[i] const row = items[i]
const idx = off + i const idx = offset + i
if (!row) { if (!row) {
return !models.length && i === 0 ? ( return !models.length && i === 0 ? (
@ -277,15 +269,15 @@ export function ModelPicker({ gw, onCancel, onSelect, sessionId, t }: ModelPicke
})} })}
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
{off + VISIBLE < models.length ? `${models.length - off - VISIBLE} more` : ' '} {offset + VISIBLE < models.length ? `${models.length - offset - VISIBLE} more` : ' '}
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <Text color={t.color.dim} wrap="truncate-end">
persist: {persistGlobal ? 'global' : 'session'} · g toggle persist: {persistGlobal ? 'global' : 'session'} · g toggle
</Text> </Text>
<Text color={t.color.dim} wrap="truncate-end"> <OverlayHint t={t}>
{models.length ? '↑/↓ select · Enter switch · 1-9,0 quick · Esc back' : 'Enter/Esc back'} {models.length ? '↑/↓ select · Enter switch · 1-9,0 quick · Esc back · q close' : 'Enter/Esc back · q close'}
</Text> </OverlayHint>
</Box> </Box>
) )
} }

View file

@ -0,0 +1,50 @@
import { Text, useInput } from '@hermes/ink'
import type { Theme } from '../theme.js'
export function useOverlayKeys({ disabled = false, onBack, onClose }: OverlayKeysOptions) {
useInput((ch, key) => {
if (disabled) {
return
}
if (ch === 'q') {
return onClose()
}
if (key.escape) {
return onBack ? onBack() : onClose()
}
})
}
export function OverlayHint({ children, t }: OverlayHintProps) {
return (
<Text color={t.color.dim} wrap="truncate-end">
{children}
</Text>
)
}
export const windowOffset = (count: number, selected: number, visible: number) =>
Math.max(0, Math.min(selected - Math.floor(visible / 2), count - visible))
export function windowItems<T>(items: T[], selected: number, visible: number) {
const offset = windowOffset(items.length, selected, visible)
return {
items: items.slice(offset, offset + visible),
offset
}
}
interface OverlayHintProps {
children: string
t: Theme
}
interface OverlayKeysOptions {
disabled?: boolean
onBack?: () => void
onClose: () => void
}

View file

@ -6,6 +6,8 @@ import type { SessionListItem, SessionListResponse } from '../gatewayTypes.js'
import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js' import { asRpcResult, rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js' import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowOffset } from './overlayControls.js'
const VISIBLE = 15 const VISIBLE = 15
const MIN_WIDTH = 60 const MIN_WIDTH = 60
const MAX_WIDTH = 120 const MAX_WIDTH = 120
@ -33,6 +35,8 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
const { stdout } = useStdout() const { stdout } = useStdout()
const width = Math.max(MIN_WIDTH, Math.min(MAX_WIDTH, (stdout?.columns ?? 80) - 6)) const width = Math.max(MIN_WIDTH, Math.min(MAX_WIDTH, (stdout?.columns ?? 80) - 6))
useOverlayKeys({ onClose: onCancel })
useEffect(() => { useEffect(() => {
gw.request<SessionListResponse>('session.list', { limit: 20 }) gw.request<SessionListResponse>('session.list', { limit: 20 })
.then(raw => { .then(raw => {
@ -56,10 +60,6 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
}, [gw]) }, [gw])
useInput((ch, key) => { useInput((ch, key) => {
if (key.escape) {
return onCancel()
}
if (key.upArrow && sel > 0) { if (key.upArrow && sel > 0) {
setSel(s => s - 1) setSel(s => s - 1)
} }
@ -87,7 +87,7 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
return ( return (
<Box flexDirection="column"> <Box flexDirection="column">
<Text color={t.color.label}>error: {err}</Text> <Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
@ -96,12 +96,12 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
return ( return (
<Box flexDirection="column"> <Box flexDirection="column">
<Text color={t.color.dim}>no previous sessions</Text> <Text color={t.color.dim}>no previous sessions</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
const off = Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), items.length - VISIBLE)) const offset = windowOffset(items.length, sel, VISIBLE)
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
@ -109,10 +109,10 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
Resume Session Resume Session
</Text> </Text>
{off > 0 && <Text color={t.color.dim}> {off} more</Text>} {offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.slice(off, off + VISIBLE).map((s, vi) => { {items.slice(offset, offset + VISIBLE).map((s, vi) => {
const i = off + vi const i = offset + vi
const selected = sel === i const selected = sel === i
return ( return (
@ -140,8 +140,8 @@ export function SessionPicker({ gw, onCancel, onSelect, t }: SessionPickerProps)
) )
})} })}
{off + VISIBLE < items.length && <Text color={t.color.dim}> {items.length - off - VISIBLE} more</Text>} {offset + VISIBLE < items.length && <Text color={t.color.dim}> {items.length - offset - VISIBLE} more</Text>}
<Text color={t.color.dim}>/ select · Enter resume · 1-9 quick · Esc cancel</Text> <OverlayHint t={t}>/ select · Enter resume · 1-9 quick · Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }

View file

@ -5,18 +5,12 @@ import type { GatewayClient } from '../gatewayClient.js'
import { rpcErrorMessage } from '../lib/rpc.js' import { rpcErrorMessage } from '../lib/rpc.js'
import type { Theme } from '../theme.js' import type { Theme } from '../theme.js'
import { OverlayHint, useOverlayKeys, windowItems, windowOffset } from './overlayControls.js'
const VISIBLE = 12 const VISIBLE = 12
const MIN_WIDTH = 40 const MIN_WIDTH = 40
const MAX_WIDTH = 90 const MAX_WIDTH = 90
const pageOffset = (count: number, sel: number) => Math.max(0, Math.min(sel - Math.floor(VISIBLE / 2), count - VISIBLE))
const visibleItems = (items: string[], sel: number) => {
const off = pageOffset(items.length, sel)
return { items: items.slice(off, off + VISIBLE), off }
}
export function SkillsHub({ gw, onClose, t }: SkillsHubProps) { export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const [skillsByCat, setSkillsByCat] = useState<Record<string, string[]>>({}) const [skillsByCat, setSkillsByCat] = useState<Record<string, string[]>>({})
const [selectedCat, setSelectedCat] = useState('') const [selectedCat, setSelectedCat] = useState('')
@ -48,6 +42,27 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const skills = selectedCat ? (skillsByCat[selectedCat] ?? []) : [] const skills = selectedCat ? (skillsByCat[selectedCat] ?? []) : []
const skillName = skills[skillIdx] ?? '' const skillName = skills[skillIdx] ?? ''
const back = () => {
if (stage === 'actions') {
setStage('skill')
setInfo(null)
setErr('')
return
}
if (stage === 'skill') {
setStage('category')
setSkillIdx(0)
return
}
onClose()
}
useOverlayKeys({ disabled: installing, onBack: back, onClose })
const inspect = (name: string) => { const inspect = (name: string) => {
setInfo(null) setInfo(null)
setErr('') setErr('')
@ -72,27 +87,6 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return return
} }
if (key.escape) {
if (stage === 'actions') {
setStage('skill')
setInfo(null)
setErr('')
return
}
if (stage === 'skill') {
setStage('category')
setSkillIdx(0)
return
}
onClose()
return
}
if (stage === 'actions') { if (stage === 'actions') {
if (key.return) { if (key.return) {
setStage('skill') setStage('skill')
@ -159,8 +153,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
const n = ch === '0' ? 10 : parseInt(ch, 10) const n = ch === '0' ? 10 : parseInt(ch, 10)
if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) { if (!Number.isNaN(n) && n >= 1 && n <= Math.min(10, count)) {
const off = pageOffset(count, sel) const next = windowOffset(count, sel, VISIBLE) + n - 1
const next = off + n - 1
if (stage === 'category') { if (stage === 'category') {
const cat = cats[next] const cat = cats[next]
@ -193,7 +186,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
<Text color={t.color.label}>error: {err}</Text> <Text color={t.color.label}>error: {err}</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
@ -202,14 +195,14 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
<Text color={t.color.dim}>no skills available</Text> <Text color={t.color.dim}>no skills available</Text>
<Text color={t.color.dim}>Esc to cancel</Text> <OverlayHint t={t}>Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
if (stage === 'category') { if (stage === 'category') {
const rows = cats.map(c => `${c} · ${skillsByCat[c]?.length ?? 0} skills`) const rows = cats.map(c => `${c} · ${skillsByCat[c]?.length ?? 0} skills`)
const { items, off } = visibleItems(rows, catIdx) const { items, offset } = windowItems(rows, catIdx, VISIBLE)
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
@ -218,10 +211,10 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
</Text> </Text>
<Text color={t.color.dim}>select a category</Text> <Text color={t.color.dim}>select a category</Text>
{off > 0 && <Text color={t.color.dim}> {off} more</Text>} {offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.map((row, i) => { {items.map((row, i) => {
const idx = off + i const idx = offset + i
return ( return (
<Text <Text
@ -237,14 +230,14 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
) )
})} })}
{off + VISIBLE < rows.length && <Text color={t.color.dim}> {rows.length - off - VISIBLE} more</Text>} {offset + VISIBLE < rows.length && <Text color={t.color.dim}> {rows.length - offset - VISIBLE} more</Text>}
<Text color={t.color.dim}>/ select · Enter open · 1-9,0 quick · Esc cancel</Text> <OverlayHint t={t}>/ select · Enter open · 1-9,0 quick · Esc/q cancel</OverlayHint>
</Box> </Box>
) )
} }
if (stage === 'skill') { if (stage === 'skill') {
const { items, off } = visibleItems(skills, skillIdx) const { items, offset } = windowItems(skills, skillIdx, VISIBLE)
return ( return (
<Box flexDirection="column" width={width}> <Box flexDirection="column" width={width}>
@ -254,10 +247,10 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
<Text color={t.color.dim}>{skills.length} skill(s)</Text> <Text color={t.color.dim}>{skills.length} skill(s)</Text>
{!skills.length ? <Text color={t.color.dim}>no skills in this category</Text> : null} {!skills.length ? <Text color={t.color.dim}>no skills in this category</Text> : null}
{off > 0 && <Text color={t.color.dim}> {off} more</Text>} {offset > 0 && <Text color={t.color.dim}> {offset} more</Text>}
{items.map((row, i) => { {items.map((row, i) => {
const idx = off + i const idx = offset + i
return ( return (
<Text <Text
@ -273,10 +266,12 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
) )
})} })}
{off + VISIBLE < skills.length && <Text color={t.color.dim}> {skills.length - off - VISIBLE} more</Text>} {offset + VISIBLE < skills.length && (
<Text color={t.color.dim}> <Text color={t.color.dim}> {skills.length - offset - VISIBLE} more</Text>
{skills.length ? '↑/↓ select · Enter open · 1-9,0 quick · Esc back' : 'Esc back'} )}
</Text> <OverlayHint t={t}>
{skills.length ? '↑/↓ select · Enter open · 1-9,0 quick · Esc back · q close' : 'Esc back · q close'}
</OverlayHint>
</Box> </Box>
) )
} }
@ -294,7 +289,7 @@ export function SkillsHub({ gw, onClose, t }: SkillsHubProps) {
{err ? <Text color={t.color.label}>error: {err}</Text> : null} {err ? <Text color={t.color.label}>error: {err}</Text> : null}
{installing ? <Text color={t.color.amber}>installing</Text> : null} {installing ? <Text color={t.color.amber}>installing</Text> : null}
<Text color={t.color.dim}>i reinspect · x reinstall · Enter/Esc back</Text> <OverlayHint t={t}>i reinspect · x reinstall · Enter/Esc back · q close</OverlayHint>
</Box> </Box>
) )
} }

View file

@ -1,5 +1,5 @@
import { Box, NoSelect, Text } from '@hermes/ink' import { Box, NoSelect, Text } from '@hermes/ink'
import { memo, useEffect, useMemo, useState, type ReactNode } from 'react' import { memo, type ReactNode, useEffect, useMemo, useState } from 'react'
import spinners, { type BrailleSpinnerName } from 'unicode-animations' import spinners, { type BrailleSpinnerName } from 'unicode-animations'
import { THINKING_COT_MAX } from '../config/limits.js' import { THINKING_COT_MAX } from '../config/limits.js'
@ -919,13 +919,22 @@ export const ToolTrail = memo(function ToolTrail({
// hidden sections stay hidden so the override is honoured. // hidden sections stay hidden so the override is honoured.
const expandAll = () => { const expandAll = () => {
if (visible.thinking !== 'hidden') setOpenThinking(true) if (visible.thinking !== 'hidden') {
if (visible.tools !== 'hidden') setOpenTools(true) setOpenThinking(true)
}
if (visible.tools !== 'hidden') {
setOpenTools(true)
}
if (visible.subagents !== 'hidden') { if (visible.subagents !== 'hidden') {
setOpenSubagents(true) setOpenSubagents(true)
setDeepSubagents(true) setDeepSubagents(true)
} }
if (visible.activity !== 'hidden') setOpenMeta(true)
if (visible.activity !== 'hidden') {
setOpenMeta(true)
}
} }
const metaTone: 'dim' | 'error' | 'warn' = activity.some(i => i.tone === 'error') const metaTone: 'dim' | 'error' | 'warn' = activity.some(i => i.tone === 'error')

View file

@ -43,7 +43,5 @@ export const isAction = (key: { ctrl: boolean; meta: boolean; super?: boolean },
* accept Cmd+B (the platform action modifier) so existing macOS muscle memory * accept Cmd+B (the platform action modifier) so existing macOS muscle memory
* keeps working. * keeps working.
*/ */
export const isVoiceToggleKey = ( export const isVoiceToggleKey = (key: { ctrl: boolean; meta: boolean; super?: boolean }, ch: string): boolean =>
key: { ctrl: boolean; meta: boolean; super?: boolean }, (key.ctrl || isActionMod(key)) && ch.toLowerCase() === 'b'
ch: string
): boolean => (key.ctrl || isActionMod(key)) && ch.toLowerCase() === 'b'