import { useEffect, useRef, useState } from "react";
import {
  Brain,
  Eye,
  Gauge,
  Lightbulb,
  Wrench,
  Loader2,
} from "lucide-react";
import { api } from "@/lib/api";
import type { ModelInfoResponse } from "@/lib/api";
import { formatTokenCount } from "@/lib/format";

interface ModelInfoCardProps {
  /** Current model string from config state — used to detect changes */
  currentModel: string;
  /** Bumped after config saves to trigger re-fetch */
  refreshKey?: number;
}

/**
 * Card showing the model's effective context window, max output tokens,
 * and capability badges (tools / vision / reasoning / model family).
 * Re-fetches whenever the model string changes or `refreshKey` is bumped
 * after a config save; renders nothing while info is missing or invalid.
 *
 * NOTE(review): the JSX markup in this component was reconstructed — the
 * original tags/classNames were lost in extraction. The logic, guard
 * conditions, and all visible text are faithful to the original; the
 * element structure and styling must be restored from the design system.
 */
export function ModelInfoCard({ currentModel, refreshKey = 0 }: ModelInfoCardProps) {
  // FIX: `useState(null)` infers state type `null` under strict mode, so
  // `.then(setInfo)` below (supplying a ModelInfoResponse) would not
  // type-check. Annotate the union explicitly.
  const [info, setInfo] = useState<ModelInfoResponse | null>(null);
  const [loading, setLoading] = useState(false);
  // Dedupe key so the same (model, refreshKey) pair is fetched only once,
  // even across re-renders.
  const lastFetchKeyRef = useRef("");

  useEffect(() => {
    if (!currentModel) return;
    // Re-fetch when model changes OR when refreshKey bumps (after save)
    const fetchKey = `${currentModel}:${refreshKey}`;
    if (fetchKey === lastFetchKeyRef.current) return;
    lastFetchKeyRef.current = fetchKey;
    setLoading(true);
    // NOTE(review): getModelInfo() takes no arguments — presumably the
    // backend reports on the currently configured model; verify. There is
    // no cancellation, so a stale response from a superseded fetch can
    // still land in state — acceptable for a display-only card.
    api
      .getModelInfo()
      .then(setInfo)
      .catch(() => setInfo(null)) // best-effort: hide the card on failure
      .finally(() => setLoading(false));
  }, [currentModel, refreshKey]);

  if (loading) {
    return (
      <div>
        <Loader2 />
        Loading model info…
      </div>
    );
  }

  // Render nothing until we have a usable payload.
  if (!info || !info.model || info.effective_context_length <= 0) return null;

  const caps = info.capabilities;
  // Aliased-condition narrowing (TS 4.4+): using `hasCaps` below narrows
  // `caps` to non-nullish inside the guarded branches.
  const hasCaps = caps && Object.keys(caps).length > 0;

  return (
    <div>
      {/* Context window */}
      <div>
        <Gauge />
        <span>Context Window</span>
        <span>{formatTokenCount(info.effective_context_length)}</span>
        {info.config_context_length > 0 ? (
          <span>(override — auto: {formatTokenCount(info.auto_context_length)})</span>
        ) : (
          <span>auto-detected</span>
        )}
      </div>

      {/* Max output */}
      {hasCaps && caps.max_output_tokens && caps.max_output_tokens > 0 && (
        <div>
          <Lightbulb />
          <span>Max Output</span>
          <span>{formatTokenCount(caps.max_output_tokens)}</span>
        </div>
      )}

      {/* Capability badges */}
      {hasCaps && (
        <div>
          {caps.supports_tools && (
            <span>
              <Wrench /> Tools
            </span>
          )}
          {caps.supports_vision && (
            <span>
              <Eye /> Vision
            </span>
          )}
          {caps.supports_reasoning && (
            <span>
              <Brain /> Reasoning
            </span>
          )}
          {caps.model_family && <span>{caps.model_family}</span>}
        </div>
      )}
    </div>
  );
}