import type { OverlayModeType } from "./terminal-chat.js";
import type { TerminalHeaderProps } from "./terminal-header.js";
import type { GroupedResponseItem } from "./use-message-grouping.js";
import type { ResponseItem } from "openai/resources/responses/responses.mjs";
import type { FileOpenerScheme } from "src/utils/config.js";

import TerminalChatResponseItem from "./terminal-chat-response-item.js";
import TerminalHeader from "./terminal-header.js";
import { Box, Static } from "ink";
import React, { useMemo } from "react";

// A batch entry can either be a standalone response item or a grouped set of
// items (e.g. auto‑approved tool‑call batches) that should be rendered
// together.
type BatchEntry = { item?: ResponseItem; group?: GroupedResponseItem };

type TerminalMessageHistoryProps = {
  batch: Array<BatchEntry>;
  groupCounts: Record<string, number>;
  items: Array<ResponseItem>;
  userMsgCount: number;
  confirmationPrompt: React.ReactNode;
  loading: boolean;
  thinkingSeconds: number;
  headerProps: TerminalHeaderProps;
  fullStdout: boolean;
  setOverlayMode: React.Dispatch<React.SetStateAction<OverlayModeType>>;
  fileOpener: FileOpenerScheme | undefined;
};

/**
 * Renders the scroll-back history of the chat session: a static header
 * followed by one entry per response item. Rendering goes through ink's
 * `<Static>` so already-printed items are written to the terminal once and
 * never re-rendered, which keeps long sessions cheap.
 *
 * `loading` / `thinkingSeconds` are accepted for interface compatibility but
 * intentionally unused — the input component owns the thinking indicator now.
 */
const TerminalMessageHistory: React.FC<TerminalMessageHistoryProps> = ({
  batch,
  headerProps,
  // `loading` and `thinkingSeconds` handled by input component now.
  loading: _loading,
  thinkingSeconds: _thinkingSeconds,
  fullStdout,
  setOverlayMode,
  fileOpener,
}) => {
  // Flatten batch entries to response items.
  // NOTE(review): the non-null assertion assumes every batch entry carries an
  // `item`; a group-only entry ({ group } with no item) would yield
  // `undefined` here — confirm against the producer of `batch`.
  const messages = useMemo(() => batch.map(({ item }) => item!), [batch]);

  return (
    <Box flexDirection="column">
      {/* The dedicated thinking indicator in the input area now displays the
          elapsed time, so we no longer render a separate counter here. */}
      <Static items={["header", ...messages] as Array<ResponseItem | string>}>
        {(item, index) => {
          // The sentinel "header" string renders the session header exactly
          // once at the top of the static region.
          if (item === "header") {
            return <TerminalHeader key="header" {...headerProps} />;
          }

          // After the guard above, item is a ResponseItem
          const message = item as ResponseItem;

          // Suppress empty reasoning updates (i.e. items with an empty summary).
          const msg = message as unknown as {
            summary?: Array<unknown>;
          };
          if (msg.summary?.length === 0) {
            return null;
          }

          return (
            <Box key={`${message.id ?? "item"}-${index}`} flexDirection="column">
              <TerminalChatResponseItem
                item={message}
                fullStdout={fullStdout}
                setOverlayMode={setOverlayMode}
                fileOpener={fileOpener}
              />
            </Box>
          );
        }}
      </Static>
    </Box>
  );
};

// Memoized so parent re-renders (e.g. keystrokes in the input) don't re-run
// this component unless its props actually change.
export default React.memo(TerminalMessageHistory);