diff --git a/hermes_cli/main.py b/hermes_cli/main.py
index fc29b848a6..489a1652db 100644
--- a/hermes_cli/main.py
+++ b/hermes_cli/main.py
@@ -1005,15 +1005,15 @@ def _launch_tui(resume_session_id: Optional[str] = None, tui_dev: bool = False):
     env.setdefault("HERMES_CWD", os.getcwd())
     # Guarantee an 8GB V8 heap + exposed GC for the TUI. Default node cap is
     # ~1.5–4GB depending on version and can fatal-OOM on long sessions with
-    # large transcripts / reasoning blobs. Append (don't clobber) any user
-    # NODE_OPTIONS.
-    _existing_node_opts = env.get("NODE_OPTIONS", "").strip()
-    _hermes_tui_node_opts = "--max-old-space-size=8192 --expose-gc"
-    env["NODE_OPTIONS"] = (
-        f"{_existing_node_opts} {_hermes_tui_node_opts}".strip()
-        if _hermes_tui_node_opts not in _existing_node_opts
-        else _existing_node_opts
-    )
+    # large transcripts / reasoning blobs. Token-level merge: respect any
+    # user-supplied --max-old-space-size (they may have set it higher) and
+    # avoid duplicating --expose-gc.
+    _tokens = env.get("NODE_OPTIONS", "").split()
+    if not any(t.startswith("--max-old-space-size=") for t in _tokens):
+        _tokens.append("--max-old-space-size=8192")
+    if "--expose-gc" not in _tokens:
+        _tokens.append("--expose-gc")
+    env["NODE_OPTIONS"] = " ".join(_tokens)
 
     if resume_session_id:
         env["HERMES_TUI_RESUME"] = resume_session_id
diff --git a/ui-tui/src/app/slash/commands/debug.ts b/ui-tui/src/app/slash/commands/debug.ts
index d44c76f34b..b4bfc16bfb 100644
--- a/ui-tui/src/app/slash/commands/debug.ts
+++ b/ui-tui/src/app/slash/commands/debug.ts
@@ -3,7 +3,7 @@ import type { SlashCommand } from '../types.js'
 
 export const debugCommands: SlashCommand[] = [
   {
-    help: 'write a V8 heap snapshot + memory diagnostics to ~/.hermes/heapdumps',
+    help: 'write a V8 heap snapshot + memory diagnostics (see HERMES_HEAPDUMP_DIR)',
     name: 'heapdump',
     run: (_arg, ctx) => {
       const { heapUsed, rss } = process.memoryUsage()
diff --git a/ui-tui/src/gatewayClient.ts b/ui-tui/src/gatewayClient.ts
index bf5210faa8..9bf681f8b2 100644
--- a/ui-tui/src/gatewayClient.ts
+++ b/ui-tui/src/gatewayClient.ts
@@ -218,7 +218,9 @@ export class GatewayClient extends EventEmitter {
     this.pending.clear()
   }
 
-  private onTimeout(id: string) {
+  // Arrow class-field — stable identity, so `setTimeout(this.onTimeout, …, id)`
+  // doesn't allocate a bound function per request.
+  private onTimeout = (id: string) => {
 
     const p = this.pending.get(id)
     if (p) {
@@ -258,7 +260,7 @@
     const id = `r${++this.reqId}`
 
     return new Promise((resolve, reject) => {
-      const timeout = setTimeout(this.onTimeout.bind(this), REQUEST_TIMEOUT_MS, id)
+      const timeout = setTimeout(this.onTimeout, REQUEST_TIMEOUT_MS, id)
 
       timeout.unref?.()
diff --git a/ui-tui/src/hooks/useVirtualHistory.ts b/ui-tui/src/hooks/useVirtualHistory.ts
index c21e25fdae..17bc8dfd3e 100644
--- a/ui-tui/src/hooks/useVirtualHistory.ts
+++ b/ui-tui/src/hooks/useVirtualHistory.ts
@@ -124,14 +124,17 @@ export function useVirtualHistory(
     const vp = Math.max(0, scrollRef.current?.getViewportHeight() ?? 0)
     const sticky = scrollRef.current?.isSticky() ?? true
 
-    const frozenRange = freezeRenders.current > 0 ? prevRange.current : null
+    // During a freeze, drop the frozen range if items shrank past its start
+    // (/clear, compaction) — clamping would collapse to an empty mount and
+    // flash blank. Fall through to the normal path in that case.
+    const frozenRange =
+      freezeRenders.current > 0 && prevRange.current && prevRange.current[0] < n ? prevRange.current : null
 
     let start = 0
     let end = n
 
     if (frozenRange) {
-      // Clamp in case items shrank (/clear, compaction) mid-freeze.
-      start = Math.min(frozenRange[0], n)
+      start = frozenRange[0]
       end = Math.min(frozenRange[1], n)
     } else if (n > 0) {
       if (vp <= 0) {
diff --git a/ui-tui/src/lib/circularBuffer.ts b/ui-tui/src/lib/circularBuffer.ts
index 675a508f22..31502fc227 100644
--- a/ui-tui/src/lib/circularBuffer.ts
+++ b/ui-tui/src/lib/circularBuffer.ts
@@ -4,6 +4,10 @@ export class CircularBuffer {
   private len = 0
 
   constructor(private capacity: number) {
+    if (!Number.isInteger(capacity) || capacity <= 0) {
+      throw new RangeError(`CircularBuffer capacity must be a positive integer, got ${capacity}`)
+    }
+
     this.buf = new Array(capacity)
   }
diff --git a/ui-tui/src/lib/memory.ts b/ui-tui/src/lib/memory.ts
index efeff6eb89..9f157adffc 100644
--- a/ui-tui/src/lib/memory.ts
+++ b/ui-tui/src/lib/memory.ts
@@ -80,7 +80,10 @@ export async function captureMemoryDiagnostics(trigger: MemoryTrigger): Promise<
   const smapsRollup = await swallow(() => readFile('/proc/self/smaps_rollup', 'utf8'))
 
   const nativeMemory = usage.rss - usage.heapUsed
-  const bytesPerSecond = uptimeSeconds > 0 ? usage.rss / uptimeSeconds : 0
+  // Real growth rate since STARTED_AT (captured at module load) — NOT a lifetime
+  // average of rss/uptime, which would report phantom "growth" for a stable process.
+  const elapsed = Math.max(0, uptimeSeconds - STARTED_AT.uptime)
+  const bytesPerSecond = elapsed > 0 ? (usage.rss - STARTED_AT.rss) / elapsed : 0
   const mbPerHour = (bytesPerSecond * 3600) / (1024 * 1024)
 
   const potentialLeaks = [
@@ -172,6 +175,8 @@
 
 const UNITS = ['B', 'KB', 'MB', 'GB', 'TB']
 
+const STARTED_AT = { rss: process.memoryUsage().rss, uptime: process.uptime() }
+
 // Returns undefined when the probe isn't available (non-Linux paths, sandboxed FS).
 const swallow = async <T>(fn: () => Promise<T>): Promise<T | undefined> => {
   try {
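
Reviewer note on the useVirtualHistory hunk (not part of the diff): a minimal TypeScript sketch contrasting the old clamp with the new drop when the item count shrinks below the frozen range's start. The helper names `clamp` and `drop` are hypothetical, introduced only to illustrate the two behaviours the added comment describes.

```ts
// Hypothetical illustration of the two behaviours after /clear or compaction.
type Range = [number, number]

// Old behaviour: clamp both ends into [0, n].
const clamp = (frozen: Range, n: number): Range =>
  [Math.min(frozen[0], n), Math.min(frozen[1], n)]

// New behaviour: discard the frozen range and fall through to the normal path.
const drop = (frozen: Range, n: number): Range | null =>
  frozen[0] < n ? frozen : null

// Frozen at items 50..80, then /clear leaves 10 items:
console.log(clamp([50, 80], 10)) // [10, 10], an empty mount that flashes blank
console.log(drop([50, 80], 10))  // null, so start/end are recomputed from scratch
```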
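Reviewer note on the memory.ts hunk (not part of the diff): a small TypeScript sketch of the rate calculation the change switches to, with made-up numbers; `baseline` here plays the role of STARTED_AT, and the helpers exist only for this example.

```ts
// A process that allocates 400 MB during startup and then stays flat.
const startupRss = 400 * 1024 * 1024

// Old behaviour: lifetime average rss / uptime, which never reaches zero, so a
// perfectly stable process still reports "growth".
const lifetimeRate = (rss: number, uptime: number) => (uptime > 0 ? rss / uptime : 0)

// New behaviour: delta against a baseline captured at module load.
const baseline = { rss: startupRss, uptime: 5 }
const deltaRate = (rss: number, uptime: number) => {
  const elapsed = Math.max(0, uptime - baseline.uptime)
  return elapsed > 0 ? (rss - baseline.rss) / elapsed : 0
}

console.log(lifetimeRate(startupRss, 3600)) // ~0.11 MB/s of phantom "growth" after an hour
console.log(deltaRate(startupRss, 3600))    // 0, the process is actually stable
```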