mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-05-07 02:51:50 +00:00
* fix(tui): make /browser connect actually take effect on the live agent Reports were that `/browser connect <url>` (and "changes to CDP url don't get picked up") didn't propagate to the live agent in `--tui`, forcing users to fall back to setting `browser.cdp_url` in `config.yaml` and restarting. Tracing the path on current main shows the protocol wiring is already correct — `/browser` is registered in `ui-tui/src/app/slash/commands/ops.ts` and dispatches `browser.manage` through the gateway RPC, NOT the slash worker (covered by the `browser.manage` row in `slashParity.test.ts`). But three real gaps left the experience flaky: 1. `cleanup_all_browsers()` ran AFTER `os.environ["BROWSER_CDP_URL"]` was rewritten. `_ensure_cdp_supervisor(...)` reads the env to resolve its target URL, so a tool call landing in that brief window could re-attach the supervisor to the OLD CDP endpoint just before we reaped sessions, leaving the agent talking to a dead URL. Reorder to clean first, swap env, clean again so the supervisor for the default task is definitively closed. 2. `browser.manage status` reported only the env var, ignoring `browser.cdp_url` from config.yaml. `_get_cdp_override()` (the resolver the agent itself uses) consults both — match it so `/browser status` answers the same question the next `browser_navigate` will see. Closes a stealth bug where users saw "browser not connected" while their CDP URL was perfectly set in config.yaml. 3. `/browser disconnect` only cleared `BROWSER_CDP_URL` and reaped once, leaving the same swap window as connect. Symmetrical double-cleanup here too. Frontend (`ops.ts`): * Echo "next browser tool call will use this CDP endpoint" on success so users see immediate confirmation that the gateway accepted the swap, even before any tool runs. * Mention `browser.cdp_url` in `config.yaml` in the usage hint and the not-connected status line. 
Persistent config is the correct fix for some terminal-multiplexer / sub-agent flows where env inheritance is unreliable; surfacing it makes that workaround discoverable. Tests (4 new, all hermetic): * `status` returns the resolved URL when only `browser.cdp_url` is set in config.yaml. * `connect` writes env AND cleans before/after, in that order. * `connect` against an unreachable endpoint does NOT mutate env or reap. * `disconnect` removes env and cleans twice. Validation: scripts/run_tests.sh tests/test_tui_gateway_server.py — 94/94 pass. cd ui-tui && npm run type-check — clean; npm test --run — 389/389. * review(copilot): always defer to _get_cdp_override; normalize bare host:port * review(copilot): collapse discovery-style CDP paths so /json/version isn't duplicated * fix(tui): /browser status must not perform CDP discovery I/O Copilot review on PR #17120: previous version routed through `tools.browser_tool._get_cdp_override`, which calls `_resolve_cdp_override` and performs an HTTP probe to /json/version with a multi-second timeout for discovery-style URLs. That blocks the TUI on `/browser status` whenever the configured host is slow or unreachable. Status now reads env-then-config directly with no network I/O. The WS normalization still happens in `browser_navigate` for actual tool calls, so behaviour-on-call is unchanged. * fix(tui): skip /json/version probe for concrete ws://devtools/browser endpoints Round 2 Copilot review on PR #17120: hosted CDP providers (Browserbase, browserless, etc.) return concrete `ws[s]://.../devtools/browser/<id>` URLs which are already directly connectable but don't serve the HTTP discovery path. The previous `/json/version` probe rejected these valid endpoints with 'could not reach browser CDP'. For `ws[s]://...` URLs whose path starts with `/devtools/browser/` we now do a TCP-level reachability check (`socket.create_connection`) instead of the HTTP probe. 
The actual CDP handshake happens on the next `browser_navigate` call, so we still surface unreachable hosts as 5031 errors — just without the false negatives. Discovery-style URLs (`http://host:port[/json[/version]]`) keep the HTTP probe path unchanged. Updated existing test + added two new ones (TCP-only success, TCP unreachable → 5031).
616 lines
18 KiB
TypeScript
616 lines
18 KiB
TypeScript
import type {
|
||
BrowserManageResponse,
|
||
DelegationPauseResponse,
|
||
ProcessStopResponse,
|
||
ReloadMcpResponse,
|
||
RollbackDiffResponse,
|
||
RollbackListResponse,
|
||
RollbackRestoreResponse,
|
||
SlashExecResponse,
|
||
SpawnTreeListResponse,
|
||
SpawnTreeLoadResponse,
|
||
ToolsConfigureResponse
|
||
} from '../../../gatewayTypes.js'
|
||
import type { PanelSection } from '../../../types.js'
|
||
import { applyDelegationStatus, getDelegationState } from '../../delegationStore.js'
|
||
import { patchOverlayState } from '../../overlayStore.js'
|
||
import { getSpawnHistory, pushDiskSnapshot, setDiffPair, type SpawnSnapshot } from '../../spawnHistoryStore.js'
|
||
import type { SlashCommand } from '../types.js'
|
||
|
||
/** Metadata returned by the `skills.manage` RPC for an `inspect` action. */
interface SkillInfo {
  category?: string
  description?: string
  name?: string
  path?: string
}

/** Response shape for `skills.manage` `list`: skill names keyed by category. */
interface SkillsListResponse {
  skills?: Record<string, string[]>
}

/** Response shape for `skills.manage` `inspect`. */
interface SkillsInspectResponse {
  info?: SkillInfo
}

/** Response shape for `skills.manage` `search`. */
interface SkillsSearchResponse {
  results?: { description?: string; name: string }[]
}

/** Response shape for `skills.manage` `install`. */
interface SkillsInstallResponse {
  installed?: boolean
  name?: string
}

/** One community-skill entry from `skills.manage` `browse`. */
interface SkillsBrowseItem {
  description?: string
  name: string
  source?: string
  trust?: string
}

/** Paged response shape for `skills.manage` `browse`. */
interface SkillsBrowseResponse {
  items?: SkillsBrowseItem[]
  page?: number
  total?: number
  total_pages?: number
}
export const opsCommands: SlashCommand[] = [
|
||
{
|
||
help: 'stop background processes',
|
||
name: 'stop',
|
||
run: (_arg, ctx) => {
|
||
ctx.gateway
|
||
.rpc<ProcessStopResponse>('process.stop', {})
|
||
.then(
|
||
ctx.guarded<ProcessStopResponse>(r => {
|
||
const killed = Number(r.killed ?? 0)
|
||
const noun = killed === 1 ? 'process' : 'processes'
|
||
ctx.transcript.sys(`stopped ${killed} background ${noun}`)
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
}
|
||
},
|
||
|
||
{
  aliases: ['reload_mcp'],
  help: 'reload MCP servers in the live session',
  name: 'reload-mcp',
  // Hot-reload MCP servers for the current session via the gateway RPC.
  run: (_arg, ctx) => {
    const report = ctx.guarded<ReloadMcpResponse>(r => {
      const message = r.status === 'reloaded' ? 'MCP servers reloaded' : 'reload complete'

      ctx.transcript.sys(message)
    })

    ctx.gateway
      .rpc<ReloadMcpResponse>('reload.mcp', { session_id: ctx.sid })
      .then(report)
      .catch(ctx.guardedErr)
  }
},
{
  help: 'manage browser CDP connection [connect|disconnect|status]',
  name: 'browser',
  // Drive the gateway's `browser.manage` RPC; no argument defaults to `status`.
  run: (arg, ctx) => {
    const input = arg.trim()
    const tokens = input ? input.split(/\s+/) : ['status']
    const action = (tokens[0] || 'status').toLowerCase()

    if (action !== 'connect' && action !== 'disconnect' && action !== 'status') {
      return ctx.transcript.sys(
        'usage: /browser [connect|disconnect|status] [url] · persistent: set browser.cdp_url in config.yaml'
      )
    }

    const payload: Record<string, unknown> = { action }

    // Only `connect` carries a URL; fall back to the conventional local CDP port.
    if (action === 'connect') {
      const requested = tokens.slice(1).join(' ').trim()

      payload.url = requested || 'http://localhost:9222'
    }

    ctx.gateway
      .rpc<BrowserManageResponse>('browser.manage', payload)
      .then(
        ctx.guarded<BrowserManageResponse>(r => {
          switch (action) {
            case 'status':
              return ctx.transcript.sys(
                r.connected
                  ? `browser connected: ${r.url || '(url unavailable)'}`
                  : 'browser not connected (try /browser connect <url> or set browser.cdp_url in config.yaml)'
              )

            case 'connect':
              if (!r.connected) {
                return ctx.transcript.sys('browser connect failed')
              }

              ctx.transcript.sys(`browser connected: ${r.url || '(url unavailable)'}`)
              ctx.transcript.sys('next browser tool call will use this CDP endpoint')

              return

            default:
              ctx.transcript.sys('browser disconnected')
          }
        })
      )
      .catch(ctx.guardedErr)
  }
},
{
  help: 'list, diff, or restore checkpoints',
  name: 'rollback',
  // `/rollback` or `/rollback list` shows checkpoints, `/rollback diff <hash>`
  // pages a diff, and `/rollback <hash> [file]` restores workspace or one file.
  run: (arg, ctx) => {
    if (!ctx.sid) {
      return ctx.transcript.sys('no active session — nothing to rollback')
    }

    const input = arg.trim()
    const [first = '', ...tail] = input.split(/\s+/).filter(Boolean)
    const verb = first.toLowerCase()

    // List all checkpoints for the active session.
    const showList = () =>
      ctx.gateway
        .rpc<RollbackListResponse>('rollback.list', { session_id: ctx.sid })
        .then(
          ctx.guarded<RollbackListResponse>(r => {
            if (!r.enabled) {
              return ctx.transcript.sys('checkpoints are not enabled')
            }

            const checkpoints = r.checkpoints ?? []

            if (!checkpoints.length) {
              return ctx.transcript.sys('no checkpoints found')
            }

            ctx.transcript.panel('Rollback checkpoints', [
              {
                rows: checkpoints.map((c, idx) => [
                  `${idx + 1}. ${c.hash.slice(0, 10)}`,
                  [c.timestamp, c.message].filter(Boolean).join(' · ') || '(no metadata)'
                ])
              }
            ])
          })
        )
        .catch(ctx.guardedErr)

    // Page the diff between the working tree and one checkpoint.
    const showDiff = (hash: string) =>
      ctx.gateway
        .rpc<RollbackDiffResponse>('rollback.diff', { hash, session_id: ctx.sid })
        .then(
          ctx.guarded<RollbackDiffResponse>(r => {
            const body = (r.rendered || r.diff || '').trim()

            if (!body && !r.stat) {
              return ctx.transcript.sys('no changes since this checkpoint')
            }

            const text = [r.stat || '', body].filter(Boolean).join('\n\n')

            ctx.transcript.page(text, 'Rollback diff')
          })
        )
        .catch(ctx.guardedErr)

    if (!input || verb === 'list' || verb === 'ls') {
      return showList()
    }

    if (verb === 'diff') {
      const hash = tail[0]

      if (!hash) {
        return ctx.transcript.sys('usage: /rollback diff <checkpoint>')
      }

      return showDiff(hash)
    }

    // Anything else is `<hash> [file ...]` — a restore request.
    const filePath = tail.join(' ').trim()

    return ctx.gateway
      .rpc<RollbackRestoreResponse>('rollback.restore', {
        ...(filePath ? { file_path: filePath } : {}),
        hash: first,
        session_id: ctx.sid
      })
      .then(
        ctx.guarded<RollbackRestoreResponse>(r => {
          if (!r.success) {
            return ctx.transcript.sys(`rollback failed: ${r.error || r.message || 'unknown error'}`)
          }

          const target = filePath || 'workspace'
          const detail = r.reason || r.message || r.restored_to || 'restored'

          ctx.transcript.sys(`rollback restored ${target}: ${detail}`)

          // The gateway dropped history entries — mirror that in the client view.
          if ((r.history_removed ?? 0) > 0) {
            ctx.transcript.setHistoryItems(prev => ctx.transcript.trimLastExchange(prev))
          }
        })
      )
      .catch(ctx.guardedErr)
  }
},
{
  aliases: ['tasks'],
  help: 'open the spawn-tree dashboard (live audit + kill/pause controls)',
  name: 'agents',
  run: (arg, ctx) => {
    const sub = arg.trim().toLowerCase()

    // Stay compatible with the gateway `/agents [pause|resume|status]` CLI —
    // explicit subcommands skip the overlay and act directly so scripts and
    // multi-step flows can drive it without entering interactive mode.
    switch (sub) {
      case 'pause':
      case 'resume':
      case 'unpause': {
        const paused = sub === 'pause'

        ctx.gateway.gw
          .request<DelegationPauseResponse>('delegation.pause', { paused })
          .then(r => {
            applyDelegationStatus({ paused: r?.paused })
            ctx.transcript.sys(`delegation · ${r?.paused ? 'paused' : 'resumed'}`)
          })
          .catch(ctx.guardedErr)

        return
      }

      case 'status': {
        const d = getDelegationState()

        ctx.transcript.sys(
          `delegation · ${d.paused ? 'paused' : 'active'} · caps d${d.maxSpawnDepth ?? '?'}/${d.maxConcurrentChildren ?? '?'}`
        )

        return
      }

      default:
        // No recognized subcommand — open the interactive dashboard overlay.
        patchOverlayState({ agents: true, agentsInitialHistoryIndex: 0 })
    }
  }
},
{
  help: 'replay a completed spawn tree · `/replay [N|last|list|load <path>]`',
  name: 'replay',
  run: (arg, ctx) => {
    const history = getSpawnHistory()
    const input = arg.trim()
    const lowered = input.toLowerCase()

    // ── Disk-backed listing ─────────────────────────────────────
    if (lowered === 'list' || lowered === 'ls') {
      ctx.gateway
        .rpc<SpawnTreeListResponse>('spawn_tree.list', {
          limit: 30,
          session_id: ctx.sid ?? 'default'
        })
        .then(
          ctx.guarded<SpawnTreeListResponse>(r => {
            const entries = r.entries ?? []

            if (!entries.length) {
              return ctx.transcript.sys('no archived spawn trees on disk for this session')
            }

            const rows: [string, string][] = entries.map(e => {
              const when = e.finished_at ? new Date(e.finished_at * 1000).toLocaleString() : '?'
              const label = e.label || `${e.count} subagents`

              return [`${when} · ${e.count}×`, `${label}\n    ${e.path}`]
            })

            ctx.transcript.panel('Archived spawn trees', [{ rows }])
          })
        )
        .catch(ctx.guardedErr)

      return
    }

    // ── Disk-backed load by path ─────────────────────────────────
    if (lowered.startsWith('load ')) {
      const path = input.slice(5).trim()

      if (!path) {
        return ctx.transcript.sys('usage: /replay load <path>')
      }

      ctx.gateway
        .rpc<SpawnTreeLoadResponse>('spawn_tree.load', { path })
        .then(
          ctx.guarded<SpawnTreeLoadResponse>(r => {
            if (!r.subagents?.length) {
              return ctx.transcript.sys('snapshot empty or unreadable')
            }

            // Push onto the in-memory history so the overlay picks it up
            // by index 1 just like any other snapshot.
            pushDiskSnapshot(r, path)
            patchOverlayState({ agents: true, agentsInitialHistoryIndex: 1 })
          })
        )
        .catch(ctx.guardedErr)

      return
    }

    // ── In-memory nav (same-session) ─────────────────────────────
    if (!history.length) {
      return ctx.transcript.sys('no completed spawn trees this session · try /replay list')
    }

    let index = 1

    if (input && lowered !== 'last') {
      const parsed = parseInt(input, 10)
      const inRange = !Number.isNaN(parsed) && parsed >= 1 && parsed <= history.length

      if (!inRange) {
        return ctx.transcript.sys(`replay: index out of range 1..${history.length} · use /replay list for disk`)
      }

      index = parsed
    }

    patchOverlayState({ agents: true, agentsInitialHistoryIndex: index })
  }
},
{
  help: 'diff two completed spawn trees · `/replay-diff <baseline> <candidate>` (indexes from /replay list or history N)',
  name: 'replay-diff',
  /**
   * Resolve two 1-based history index tokens into snapshots, stash them as the
   * active diff pair, and open the agents overlay.
   *
   * Fix: the previous version leaned on non-null assertions (`token!` on an
   * already-`string` parameter, plus `resolve(a!)` / `resolve(b!)`). Destructure
   * with defaults instead — the `parts.length !== 2` guard guarantees both
   * tokens exist, so the defaults are never actually used and no `!` is needed.
   */
  run: (arg, ctx) => {
    const parts = arg.trim().split(/\s+/).filter(Boolean)

    if (parts.length !== 2) {
      return ctx.transcript.sys('usage: /replay-diff <a> <b> (e.g. /replay-diff 1 2 for last two)')
    }

    const [a = '', b = ''] = parts
    const history = getSpawnHistory()

    // Map a 1-based index token onto the in-memory spawn history; null when
    // the token is not a number or falls outside the recorded range.
    const resolve = (token: string): null | SpawnSnapshot => {
      const n = parseInt(token, 10)

      if (Number.isFinite(n) && n >= 1 && n <= history.length) {
        return history[n - 1] ?? null
      }

      return null
    }

    const baseline = resolve(a)
    const candidate = resolve(b)

    if (!baseline || !candidate) {
      return ctx.transcript.sys(`replay-diff: could not resolve indices · history has ${history.length} entries`)
    }

    setDiffPair({ baseline, candidate })
    patchOverlayState({ agents: true, agentsInitialHistoryIndex: 0 })
  }
},
{
|
||
help: 'browse, inspect, install skills',
|
||
name: 'skills',
|
||
run: (arg, ctx) => {
|
||
const text = arg.trim()
|
||
|
||
if (!text) {
|
||
return patchOverlayState({ skillsHub: true })
|
||
}
|
||
|
||
const [sub, ...rest] = text.split(/\s+/)
|
||
const query = rest.join(' ').trim()
|
||
const { rpc } = ctx.gateway
|
||
const { panel, sys } = ctx.transcript
|
||
|
||
if (sub === 'list') {
|
||
rpc<SkillsListResponse>('skills.manage', { action: 'list' })
|
||
.then(
|
||
ctx.guarded<SkillsListResponse>(r => {
|
||
const cats = Object.entries(r.skills ?? {}).sort()
|
||
|
||
if (!cats.length) {
|
||
return sys('no skills available')
|
||
}
|
||
|
||
panel(
|
||
'Skills',
|
||
cats.map<PanelSection>(([title, items]) => ({ items, title }))
|
||
)
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
if (sub === 'inspect') {
|
||
if (!query) {
|
||
return sys('usage: /skills inspect <name>')
|
||
}
|
||
|
||
rpc<SkillsInspectResponse>('skills.manage', { action: 'inspect', query })
|
||
.then(
|
||
ctx.guarded<SkillsInspectResponse>(r => {
|
||
const info = r.info ?? {}
|
||
|
||
if (!info.name) {
|
||
return sys(`unknown skill: ${query}`)
|
||
}
|
||
|
||
const rows: [string, string][] = [
|
||
['Name', String(info.name)],
|
||
['Category', String(info.category ?? '')],
|
||
['Path', String(info.path ?? '')]
|
||
]
|
||
|
||
const sections: PanelSection[] = [{ rows }]
|
||
|
||
if (info.description) {
|
||
sections.push({ text: String(info.description) })
|
||
}
|
||
|
||
panel('Skill', sections)
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
if (sub === 'search') {
|
||
if (!query) {
|
||
return sys('usage: /skills search <query>')
|
||
}
|
||
|
||
rpc<SkillsSearchResponse>('skills.manage', { action: 'search', query })
|
||
.then(
|
||
ctx.guarded<SkillsSearchResponse>(r => {
|
||
const results = r.results ?? []
|
||
|
||
if (!results.length) {
|
||
return sys(`no results for: ${query}`)
|
||
}
|
||
|
||
panel(`Search: ${query}`, [{ rows: results.map(s => [s.name, s.description ?? '']) }])
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
if (sub === 'install') {
|
||
if (!query) {
|
||
return sys('usage: /skills install <name or url>')
|
||
}
|
||
|
||
sys(`installing ${query}…`)
|
||
|
||
rpc<SkillsInstallResponse>('skills.manage', { action: 'install', query })
|
||
.then(
|
||
ctx.guarded<SkillsInstallResponse>(r =>
|
||
sys(r.installed ? `installed ${r.name ?? query}` : 'install failed')
|
||
)
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
if (sub === 'browse') {
|
||
const pageNum = query ? parseInt(query, 10) : 1
|
||
|
||
if (Number.isNaN(pageNum) || pageNum < 1) {
|
||
return sys('usage: /skills browse [page] (page must be a positive number)')
|
||
}
|
||
|
||
sys('fetching community skills (scans 6 sources, may take ~15s)…')
|
||
|
||
rpc<SkillsBrowseResponse>('skills.manage', { action: 'browse', page: pageNum })
|
||
.then(
|
||
ctx.guarded<SkillsBrowseResponse>(r => {
|
||
const items = r.items ?? []
|
||
|
||
if (!items.length) {
|
||
return sys(`no skills on page ${pageNum}${r.total ? ` (total ${r.total})` : ''}`)
|
||
}
|
||
|
||
const rows: [string, string][] = items.map(s => [
|
||
s.trust ? `${s.name} · ${s.trust}` : s.name,
|
||
String(s.description ?? '').slice(0, 160)
|
||
])
|
||
|
||
const footer: string[] = []
|
||
|
||
if (r.page && r.total_pages) {
|
||
footer.push(`page ${r.page} of ${r.total_pages}`)
|
||
}
|
||
|
||
if (r.total) {
|
||
footer.push(`${r.total} skills total`)
|
||
}
|
||
|
||
if (r.page && r.total_pages && r.page < r.total_pages) {
|
||
footer.push(`/skills browse ${r.page + 1} for more`)
|
||
}
|
||
|
||
panel(`Browse Skills${pageNum > 1 ? ` — p${pageNum}` : ''}`, [
|
||
{ rows },
|
||
...(footer.length ? [{ text: footer.join(' · ') }] : [])
|
||
])
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
sys('usage: /skills [list | inspect <n> | install <n> | search <q> | browse [page]]')
|
||
}
|
||
},
|
||
|
||
{
|
||
help: 'enable or disable tools (client-side history reset on change)',
|
||
name: 'tools',
|
||
run: (arg, ctx, cmd) => {
|
||
const [subcommand, ...names] = arg.trim().split(/\s+/).filter(Boolean)
|
||
|
||
if (subcommand !== 'disable' && subcommand !== 'enable') {
|
||
ctx.gateway.gw
|
||
.request<SlashExecResponse>('slash.exec', { command: cmd.slice(1), session_id: ctx.sid })
|
||
.then(r => {
|
||
if (ctx.stale()) {
|
||
return
|
||
}
|
||
|
||
const body = r?.output || '/tools: no output'
|
||
const text = r?.warning ? `warning: ${r.warning}\n${body}` : body
|
||
const long = text.length > 180 || text.split('\n').filter(Boolean).length > 2
|
||
|
||
long ? ctx.transcript.page(text, 'Tools') : ctx.transcript.sys(text)
|
||
})
|
||
.catch(ctx.guardedErr)
|
||
|
||
return
|
||
}
|
||
|
||
if (!names.length) {
|
||
ctx.transcript.sys(`usage: /tools ${subcommand} <name> [name ...]`)
|
||
ctx.transcript.sys(`built-in toolset: /tools ${subcommand} web`)
|
||
ctx.transcript.sys(`MCP tool: /tools ${subcommand} github:create_issue`)
|
||
|
||
return
|
||
}
|
||
|
||
ctx.gateway
|
||
.rpc<ToolsConfigureResponse>('tools.configure', { action: subcommand, names, session_id: ctx.sid })
|
||
.then(
|
||
ctx.guarded<ToolsConfigureResponse>(r => {
|
||
if (r.info) {
|
||
ctx.session.setSessionStartedAt(Date.now())
|
||
ctx.session.resetVisibleHistory(r.info)
|
||
}
|
||
|
||
if (r.changed?.length) {
|
||
ctx.transcript.sys(`${subcommand === 'disable' ? 'disabled' : 'enabled'}: ${r.changed.join(', ')}`)
|
||
}
|
||
|
||
if (r.unknown?.length) {
|
||
ctx.transcript.sys(`unknown toolsets: ${r.unknown.join(', ')}`)
|
||
}
|
||
|
||
if (r.missing_servers?.length) {
|
||
ctx.transcript.sys(`missing MCP servers: ${r.missing_servers.join(', ')}`)
|
||
}
|
||
|
||
if (r.reset) {
|
||
ctx.transcript.sys('session reset. new tool configuration is active.')
|
||
}
|
||
})
|
||
)
|
||
.catch(ctx.guardedErr)
|
||
}
|
||
}
|
||
]
|