refactor(tui): /clean pass on memory + resize helpers

KISS/DRY sweep — drops ~90 LOC with no behavior change.

- circularBuffer: drop unused pushAll/toArray/size; fold toArray into drain
- gracefulExit: inline Cleanup type + failsafe const; signal→code as a
  record instead of nested ternary; drop dead .catch on Promise.allSettled;
  drop unused forceExit
- memory: inline heapDumpRoot() + writeSnapshot() (single-use); collapse
  the two fd/smaps try/catch blocks behind one `swallow` helper; build
  potentialLeaks functionally (array+filter) instead of imperative
  push-chain; UNITS at file bottom
- memoryMonitor: inline DEFAULTS; drop unused onSnapshot; collapse
  dumpedHigh/dumpedCritical bools to a single Set; single callback
  dispatch line instead of duplicated if-chains
- entry.tsx: factor `dumpNotice` formatter (used twice by onHigh +
  onCritical)
- useMainApp resize debounce: drop redundant `if (timer)` guards
  (clearTimeout(undefined) is a no-op); init as undefined not null
- useVirtualHistory: trim wall-of-text comment to one-line intent; hoist
  `const n = items.length`; split comma-declared lets; remove the
  `;[start, end] = frozenRange` destructure in favor of direct Math.min
  clamps; hoist `hi` init in upperBound for consistency

Validation: tsc clean (both configs), eslint clean on touched files,
vitest 102/102, build produces shebang-preserved dist/entry.js,
performHeapDump smoke-test still writes valid snapshot + diagnostics.
This commit is contained in:
Brooklyn Nicholson 2026-04-20 18:51:12 -05:00
parent 0078f743e6
commit 82b927777c
7 changed files with 89 additions and 182 deletions

View file

@@ -306,15 +306,12 @@ export function useMainApp(gw: GatewayClient) {
return
}
let timer: null | ReturnType<typeof setTimeout> = null
let timer: ReturnType<typeof setTimeout> | undefined
const onResize = () => {
if (timer) {
clearTimeout(timer)
}
clearTimeout(timer)
timer = setTimeout(() => {
timer = null
timer = undefined
void rpc<TerminalResizeResponse>('terminal.resize', { cols: stdout.columns ?? 80, session_id: ui.sid })
}, 100)
}
@@ -322,10 +319,7 @@ export function useMainApp(gw: GatewayClient) {
stdout.on('resize', onResize)
return () => {
if (timer) {
clearTimeout(timer)
}
clearTimeout(timer)
stdout.off('resize', onResize)
}
}, [rpc, stdout, ui.sid])

View file

@@ -2,8 +2,8 @@
import { bootBanner } from './bootBanner.js'
import { GatewayClient } from './gatewayClient.js'
import { setupGracefulExit } from './lib/gracefulExit.js'
import { formatBytes, performHeapDump } from './lib/memory.js'
import { startMemoryMonitor } from './lib/memoryMonitor.js'
import { formatBytes, type HeapDumpResult, performHeapDump } from './lib/memory.js'
import { type MemorySnapshot, startMemoryMonitor } from './lib/memoryMonitor.js'
if (!process.stdin.isTTY) {
console.log('hermes-tui: no TTY')
@@ -16,6 +16,9 @@ const gw = new GatewayClient()
gw.start()
// Shared stderr line for the onHigh and onCritical memory-monitor callbacks:
// reports the level, current heap usage, and where the auto heap dump landed.
function dumpNotice(snap: MemorySnapshot, dump: HeapDumpResult | null): string {
  // A failed dump still gets a notice — just with a '(failed)' placeholder path.
  const target = dump?.heapPath ?? '(failed)'
  return `hermes-tui: ${snap.level} memory (${formatBytes(snap.heapUsed)}) — auto heap dump → ${target}\n`
}
setupGracefulExit({
cleanups: [() => gw.kill()],
onError: (scope, err) => {
@@ -28,16 +31,11 @@ setupGracefulExit({
const stopMemoryMonitor = startMemoryMonitor({
onCritical: (snap, dump) => {
process.stderr.write(
`hermes-tui: critical memory (${formatBytes(snap.heapUsed)}) — auto heap dump → ${dump?.heapPath ?? '(failed)'}\n`
)
process.stderr.write(dumpNotice(snap, dump))
process.stderr.write('hermes-tui: exiting to avoid OOM; restart to recover\n')
process.exit(137)
},
onHigh: (snap, dump) =>
process.stderr.write(
`hermes-tui: high memory (${formatBytes(snap.heapUsed)}) — auto heap dump → ${dump?.heapPath ?? '(failed)'}\n`
)
onHigh: (snap, dump) => process.stderr.write(dumpNotice(snap, dump))
})
if (process.env.HERMES_HEAPDUMP_ON_START === '1') {

View file

@@ -18,11 +18,12 @@ const QUANTUM = OVERSCAN >> 1
const FREEZE_RENDERS = 2
const upperBound = (arr: number[], target: number) => {
let lo = 0,
hi = arr.length
let lo = 0
let hi = arr.length
while (lo < hi) {
const mid = (lo + hi) >> 1
arr[mid]! <= target ? (lo = mid + 1) : (hi = mid)
}
@@ -42,16 +43,11 @@ export function useVirtualHistory(
const [hasScrollRef, setHasScrollRef] = useState(false)
const metrics = useRef({ sticky: true, top: 0, vp: 0 })
// Resize handling — scale cached heights by oldCols/newCols so post-resize
// offsets stay roughly aligned with (still-unknown) real Yoga heights.
// Clearing the cache instead would force a pessimistic back-walk that mounts
// ~190 rows at once (viewport+overscan at 1-row estimate), each a fresh
// marked.lexer + syntax highlight = ~3ms; ~600ms React commit block. Freeze
// the mount range for FREEZE_RENDERS so warm useMemo results survive while
// the layout effect writes post-resize real heights back into cache.
// skipMeasurement prevents that first post-resize useLayoutEffect from
// poisoning the cache with pre-resize Yoga values (Yoga's stored heights
// are from the frame BEFORE this render's calculateLayout with new width).
// Width change: scale cached heights (not clear — clearing forces a
// pessimistic back-walk mounting ~190 rows at once, each a fresh
// marked.lexer + syntax highlight ≈ 3ms). Freeze mount range for 2
// renders so warm memos survive; skip one measurement so useLayoutEffect
// doesn't poison the scaled cache with pre-resize Yoga heights.
const prevColumns = useRef(columns)
const skipMeasurement = useRef(false)
const prevRange = useRef<null | readonly [number, number]>(null)
@@ -122,34 +118,32 @@ export function useVirtualHistory(
return out
}, [estimate, items, ver])
const total = offsets[items.length] ?? 0
const n = items.length
const total = offsets[n] ?? 0
const top = Math.max(0, scrollRef.current?.getScrollTop() ?? 0)
const vp = Math.max(0, scrollRef.current?.getViewportHeight() ?? 0)
const sticky = scrollRef.current?.isSticky() ?? true
const frozenRange = freezeRenders.current > 0 ? prevRange.current : null
let start = 0,
end = items.length
let start = 0
let end = n
if (frozenRange) {
// Columns just changed. Reuse the pre-resize mount range so already-mounted
// MessageRows keep their warm memos (marked.lexer, syntax highlight). Clamp
// to n in case messages were removed (/clear, compaction) mid-freeze.
;[start, end] = frozenRange
start = Math.min(start, items.length)
end = Math.min(end, items.length)
} else if (items.length > 0) {
// Clamp in case items shrank (/clear, compaction) mid-freeze.
start = Math.min(frozenRange[0], n)
end = Math.min(frozenRange[1], n)
} else if (n > 0) {
if (vp <= 0) {
start = Math.max(0, items.length - coldStartCount)
start = Math.max(0, n - coldStartCount)
} else {
start = Math.max(0, Math.min(items.length - 1, upperBound(offsets, Math.max(0, top - overscan)) - 1))
end = Math.max(start + 1, Math.min(items.length, upperBound(offsets, top + vp + overscan)))
start = Math.max(0, Math.min(n - 1, upperBound(offsets, Math.max(0, top - overscan)) - 1))
end = Math.max(start + 1, Math.min(n, upperBound(offsets, top + vp + overscan)))
}
}
if (end - start > maxMounted) {
sticky ? (start = Math.max(0, end - maxMounted)) : (end = Math.min(items.length, start + maxMounted))
sticky ? (start = Math.max(0, end - maxMounted)) : (end = Math.min(n, start + maxMounted))
}
if (freezeRenders.current > 0) {
@@ -173,9 +167,6 @@ export function useVirtualHistory(
let dirty = false
if (skipMeasurement.current) {
// First render after a column change — Yoga heights still reflect the
// pre-resize layout. Writing them into cache would overwrite the scaled
// estimates with stale pre-resize values. Next render's Yoga is correct.
skipMeasurement.current = false
} else {
for (let i = start; i < end; i++) {

View file

@@ -16,12 +16,6 @@ export class CircularBuffer&lt;T&gt; {
}
}
pushAll(items: readonly T[]) {
for (const item of items) {
this.push(item)
}
}
tail(n = this.len): T[] {
const take = Math.min(Math.max(0, n), this.len)
const start = this.len < this.capacity ? 0 : this.head
@@ -34,12 +28,8 @@ export class CircularBuffer&lt;T&gt; {
return out
}
toArray(): T[] {
return this.tail(this.len)
}
drain(): T[] {
const out = this.toArray()
const out = this.tail()
this.clear()
@@ -51,8 +41,4 @@ export class CircularBuffer&lt;T&gt; {
this.head = 0
this.len = 0
}
get size() {
return this.len
}
}

View file

@@ -1,22 +1,19 @@
type Cleanup = () => Promise<void> | void
interface SetupOptions {
cleanups?: Cleanup[]
cleanups?: (() => Promise<void> | void)[]
failsafeMs?: number
onError?: (scope: 'uncaughtException' | 'unhandledRejection', err: unknown) => void
onSignal?: (signal: NodeJS.Signals) => void
}
const DEFAULT_FAILSAFE_MS = 4000
// Conventional shell exit statuses for fatal signals: 128 + signal number.
const SIGNAL_EXIT_CODE: Record<'SIGHUP' | 'SIGINT' | 'SIGTERM', number> = {
  SIGHUP: 129, // 128 + 1
  SIGINT: 130, // 128 + 2
  SIGTERM: 143 // 128 + 15
}
let wired = false
export function setupGracefulExit({
cleanups = [],
failsafeMs = DEFAULT_FAILSAFE_MS,
onError,
onSignal
}: SetupOptions = {}) {
export function setupGracefulExit({ cleanups = [], failsafeMs = 4000, onError, onSignal }: SetupOptions = {}) {
if (wired) {
return
}
@@ -36,28 +33,15 @@ export function setupGracefulExit({
onSignal?.(signal)
}
const failsafe = setTimeout(() => process.exit(code), failsafeMs)
setTimeout(() => process.exit(code), failsafeMs).unref?.()
failsafe.unref?.()
void Promise.allSettled(cleanups.map(fn => Promise.resolve().then(fn)))
.catch(() => {})
.finally(() => process.exit(code))
void Promise.allSettled(cleanups.map(fn => Promise.resolve().then(fn))).finally(() => process.exit(code))
}
for (const sig of ['SIGINT', 'SIGTERM', 'SIGHUP'] as const) {
process.on(sig, () => exit(sig === 'SIGINT' ? 130 : sig === 'SIGTERM' ? 143 : 129, sig))
process.on(sig, () => exit(SIGNAL_EXIT_CODE[sig], sig))
}
process.on('uncaughtException', err => {
onError?.('uncaughtException', err)
})
process.on('unhandledRejection', reason => {
onError?.('unhandledRejection', reason)
})
}
export function forceExit(code = 0) {
process.exit(code)
process.on('uncaughtException', err => onError?.('uncaughtException', err))
process.on('unhandledRejection', reason => onError?.('unhandledRejection', reason))
}

View file

@@ -5,7 +5,7 @@ import { join } from 'node:path'
import { pipeline } from 'node:stream/promises'
import { getHeapSnapshot, getHeapSpaceStatistics, getHeapStatistics } from 'node:v8'
export type MemoryTrigger = 'auto-high' | 'auto-critical' | 'manual'
export type MemoryTrigger = 'auto-critical' | 'auto-high' | 'manual'
export interface MemoryDiagnostics {
activeHandles: number
@@ -54,74 +54,43 @@ export interface HeapDumpResult {
success: boolean
}
const heapDumpRoot = () =>
process.env.HERMES_HEAPDUMP_DIR?.trim() || join(homedir() || tmpdir(), '.hermes', 'heapdumps')
const processInternals = process as unknown as {
_getActiveHandles: () => unknown[]
_getActiveRequests: () => unknown[]
}
export async function captureMemoryDiagnostics(trigger: MemoryTrigger): Promise<MemoryDiagnostics> {
const usage = process.memoryUsage()
const heapStats = getHeapStatistics()
const resourceUsage = process.resourceUsage()
const uptimeSeconds = process.uptime()
// Not available on Bun / older Node.
let heapSpaces: ReturnType<typeof getHeapSpaceStatistics> | undefined
try {
heapSpaces = getHeapSpaceStatistics()
} catch {
/* Bun / older Node — ignore */
/* noop */
}
const activeHandles = processInternals._getActiveHandles().length
const activeRequests = processInternals._getActiveRequests().length
let openFileDescriptors: number | undefined
try {
openFileDescriptors = (await readdir('/proc/self/fd')).length
} catch {
/* non-Linux */
const internals = process as unknown as {
_getActiveHandles: () => unknown[]
_getActiveRequests: () => unknown[]
}
let smapsRollup: string | undefined
try {
smapsRollup = await readFile('/proc/self/smaps_rollup', 'utf8')
} catch {
/* non-Linux / no access */
}
const activeHandles = internals._getActiveHandles().length
const activeRequests = internals._getActiveRequests().length
const openFileDescriptors = await swallow(async () => (await readdir('/proc/self/fd')).length)
const smapsRollup = await swallow(() => readFile('/proc/self/smaps_rollup', 'utf8'))
const nativeMemory = usage.rss - usage.heapUsed
const bytesPerSecond = uptimeSeconds > 0 ? usage.rss / uptimeSeconds : 0
const mbPerHour = (bytesPerSecond * 3600) / (1024 * 1024)
const potentialLeaks: string[] = []
if (heapStats.number_of_detached_contexts > 0) {
potentialLeaks.push(
`${heapStats.number_of_detached_contexts} detached context(s) — possible component/closure leak`
)
}
if (activeHandles > 100) {
potentialLeaks.push(`${activeHandles} active handles — possible timer/socket leak`)
}
if (nativeMemory > usage.heapUsed) {
potentialLeaks.push('Native memory > heap — leak may be in native addons')
}
if (mbPerHour > 100) {
potentialLeaks.push(`High memory growth rate: ${mbPerHour.toFixed(1)} MB/hour`)
}
if (openFileDescriptors && openFileDescriptors > 500) {
potentialLeaks.push(`${openFileDescriptors} open FDs — possible file/socket leak`)
}
const potentialLeaks = [
heapStats.number_of_detached_contexts > 0 &&
`${heapStats.number_of_detached_contexts} detached context(s) — possible component/closure leak`,
activeHandles > 100 && `${activeHandles} active handles — possible timer/socket leak`,
nativeMemory > usage.heapUsed && 'Native memory > heap — leak may be in native addons',
mbPerHour > 100 && `High memory growth rate: ${mbPerHour.toFixed(1)} MB/hour`,
openFileDescriptors && openFileDescriptors > 500 && `${openFileDescriptors} open FDs — possible file/socket leak`
].filter((s): s is string => typeof s === 'string')
return {
activeHandles,
@@ -170,18 +139,19 @@ export async function captureMemoryDiagnostics(trigger: MemoryTrigger): Promise&lt;
export async function performHeapDump(trigger: MemoryTrigger = 'manual'): Promise<HeapDumpResult> {
try {
// Diagnostics first — heap-snapshot serialization can crash on very large
// heaps, and the JSON sidecar is the most actionable artifact if so.
const diagnostics = await captureMemoryDiagnostics(trigger)
const dir = heapDumpRoot()
const dir = process.env.HERMES_HEAPDUMP_DIR?.trim() || join(homedir() || tmpdir(), '.hermes', 'heapdumps')
await mkdir(dir, { recursive: true })
const stamp = new Date().toISOString().replace(/[:.]/g, '-')
const base = `hermes-${stamp}-${process.pid}-${trigger}`
const base = `hermes-${new Date().toISOString().replace(/[:.]/g, '-')}-${process.pid}-${trigger}`
const heapPath = join(dir, `${base}.heapsnapshot`)
const diagPath = join(dir, `${base}.diagnostics.json`)
await writeFile(diagPath, JSON.stringify(diagnostics, null, 2), { mode: 0o600 })
await writeSnapshot(heapPath)
await pipeline(getHeapSnapshot(), createWriteStream(heapPath, { mode: 0o600 }))
return { diagPath, heapPath, success: true }
} catch (e) {
@@ -194,15 +164,19 @@ export function formatBytes(bytes: number): string {
return '0B'
}
const units = ['B', 'KB', 'MB', 'GB', 'TB']
const exp = Math.min(units.length - 1, Math.floor(Math.log10(bytes) / 3))
const exp = Math.min(UNITS.length - 1, Math.floor(Math.log10(bytes) / 3))
const value = bytes / 1024 ** exp
return `${value >= 100 ? value.toFixed(0) : value.toFixed(1)}${units[exp]}`
return `${value >= 100 ? value.toFixed(0) : value.toFixed(1)}${UNITS[exp]}`
}
async function writeSnapshot(filepath: string) {
const stream = createWriteStream(filepath, { mode: 0o600 })
const UNITS = ['B', 'KB', 'MB', 'GB', 'TB']
await pipeline(getHeapSnapshot(), stream)
// Best-effort probe runner: resolves to the probe's value, or undefined when
// the probe isn't available (non-Linux paths, sandboxed FS).
async function swallow<T>(fn: () => Promise<T>): Promise<T | undefined> {
  try {
    return await fn()
  } catch {
    // Probe failure is expected on unsupported platforms — report "no data".
    return undefined
  }
}

View file

@@ -14,57 +14,37 @@ export interface MemoryMonitorOptions {
intervalMs?: number
onCritical?: (snap: MemorySnapshot, dump: HeapDumpResult | null) => void
onHigh?: (snap: MemorySnapshot, dump: HeapDumpResult | null) => void
onSnapshot?: (snap: MemorySnapshot) => void
}
const GB = 1024 ** 3
const DEFAULTS = {
criticalBytes: 2.5 * GB,
highBytes: 1.5 * GB,
intervalMs: 10_000
}
export function startMemoryMonitor({
criticalBytes = DEFAULTS.criticalBytes,
highBytes = DEFAULTS.highBytes,
intervalMs = DEFAULTS.intervalMs,
criticalBytes = 2.5 * GB,
highBytes = 1.5 * GB,
intervalMs = 10_000,
onCritical,
onHigh,
onSnapshot
onHigh
}: MemoryMonitorOptions = {}): () => void {
let dumpedHigh = false
let dumpedCritical = false
const dumped = new Set<Exclude<MemoryLevel, 'normal'>>()
const tick = async () => {
const { heapUsed, rss } = process.memoryUsage()
const level: MemoryLevel = heapUsed >= criticalBytes ? 'critical' : heapUsed >= highBytes ? 'high' : 'normal'
const snap: MemorySnapshot = { heapUsed, level, rss }
onSnapshot?.(snap)
if (level === 'normal') {
dumpedHigh = false
dumpedCritical = false
return void dumped.clear()
}
if (dumped.has(level)) {
return
}
if (level === 'high' && !dumpedHigh) {
dumpedHigh = true
const dump = await performHeapDump('auto-high').catch(() => null)
dumped.add(level)
const dump = await performHeapDump(level === 'critical' ? 'auto-critical' : 'auto-high').catch(() => null)
onHigh?.(snap, dump)
const snap: MemorySnapshot = { heapUsed, level, rss }
return
}
if (level === 'critical' && !dumpedCritical) {
dumpedCritical = true
const dump = await performHeapDump('auto-critical').catch(() => null)
onCritical?.(snap, dump)
}
;(level === 'critical' ? onCritical : onHigh)?.(snap, dump)
}
const handle = setInterval(() => void tick(), intervalMs)