Skip to content

Commit 1ad51e6

Browse files
authored
Various improvements to chat interface (#18)
* Fix overwriting final round text
* Separate reasoning blocks
* Fix double spacing
* Separate ContentRound component
* Vertical border per-round
* Display artifacts within rounds
* Don't jump to bottom at end
* Fix virtualizer pushing scroll down
* Remove tool call indicator
* Add compact mode
* Show rounds on hover
* Review fixes
* Storybook fixes
* Better constructs for rounds
* Show thinking / processing interstitials between rounds
* Higher-level management of streaming status indications
* Chat UI details as two buttons
* Make thinking indicator nicer
* Fix missing reasoning indicator
* Update compact UI
* Fix storybook
* Review fixes
* Add ContentRound story
1 parent e92c8fc commit 1ad51e6

18 files changed

Lines changed: 1187 additions & 728 deletions

ui/src/components/ChatMessageList/ChatMessageList.tsx

Lines changed: 81 additions & 93 deletions
Original file line numberDiff line numberDiff line change
@@ -281,6 +281,11 @@ export function ChatMessageList({
281281
enabled: messageGroups.length > 0,
282282
});
283283

284+
// Don't adjust scroll position when the actively-streaming item grows —
285+
// the default correction pushes the user further down on every token.
286+
virtualizer.shouldAdjustScrollPositionOnItemSizeChange = (item, _delta, instance) =>
287+
!(item.index === instance.options.count - 1 && hasStreamingResponses);
288+
284289
// Track message count to detect new user messages
285290
const prevMessagesLengthRef = useRef(messages.length);
286291

@@ -347,15 +352,32 @@ export function ChatMessageList({
347352
<div
348353
className="relative"
349354
style={{
350-
// Use max of virtualizer size and estimated size to prevent layout jumps
351-
height:
352-
Math.max(virtualizer.getTotalSize(), messageGroups.length * 200) +
353-
(hasStreamingResponses ? 200 : 0),
355+
height: Math.max(virtualizer.getTotalSize(), messageGroups.length * 200),
354356
}}
355357
>
356-
{/* Virtualized message groups */}
357358
{virtualizer.getVirtualItems().map((virtualItem) => {
358359
const group = messageGroups[virtualItem.index];
360+
const isLastGroup = virtualItem.index === messageGroups.length - 1;
361+
const activeStreamingIds =
362+
isLastGroup && hasStreamingResponses
363+
? new Set(filteredModelResponses.map((r) => r.instanceId ?? r.model))
364+
: null;
365+
const committedInstanceIds = new Set(
366+
group.assistantResponses
367+
.filter((r) => !activeStreamingIds?.has(r.instanceId ?? r.model ?? ""))
368+
.map((r) => r.instanceId ?? r.model ?? "")
369+
);
370+
const showStreaming =
371+
isLastGroup &&
372+
hasStreamingResponses &&
373+
filteredModelResponses.some(
374+
(r) => !committedInstanceIds.has(r.instanceId ?? r.model)
375+
);
376+
const committedResponses = activeStreamingIds
377+
? group.assistantResponses.filter(
378+
(r) => !activeStreamingIds.has(r.instanceId ?? r.model ?? "")
379+
)
380+
: group.assistantResponses;
359381
return (
360382
<div
361383
key={group.id}
@@ -370,7 +392,58 @@ export function ChatMessageList({
370392
onSaveEdit={onEditAndRerun}
371393
onRegenerate={onRegenerateAll}
372394
/>
373-
{group.assistantResponses.length > 0 && (
395+
{showStreaming && (
396+
<>
397+
<RoutingDecision />
398+
<ChainProgress
399+
models={selectedModels.filter((m) => !disabledModels.includes(m))}
400+
/>
401+
<SynthesisProgress
402+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
403+
/>
404+
<RefinementProgress />
405+
<CritiqueProgress />
406+
<ElectedProgress
407+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
408+
/>
409+
<TournamentProgress
410+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
411+
/>
412+
<ConsensusProgress
413+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
414+
/>
415+
<DebateProgress
416+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
417+
/>
418+
<CouncilProgress
419+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
420+
/>
421+
<HierarchicalProgress />
422+
<ScattershotProgress />
423+
<ExplainerProgress />
424+
<ConfidenceProgress
425+
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
426+
/>
427+
<div
428+
key={streamingSessionIdRef.current}
429+
className="animate-slide-up-bounce"
430+
>
431+
<MultiModelResponse
432+
responses={filteredModelResponses.map((r) => {
433+
const instanceId = r.instanceId ?? r.model;
434+
return {
435+
...r,
436+
instanceId,
437+
label: instanceLabels.get(instanceId),
438+
};
439+
})}
440+
timestamp={streamingTimestampRef.current}
441+
actionConfig={actionConfig}
442+
/>
443+
</div>
444+
</>
445+
)}
446+
{committedResponses.length > 0 && (
374447
<>
375448
{/* Show persisted mode indicators for chained/routed messages */}
376449
{group.assistantResponses[0].modeMetadata?.mode === "routed" && (
@@ -543,7 +616,7 @@ export function ChatMessageList({
543616
</div>
544617
)}
545618
<MultiModelResponse
546-
responses={group.assistantResponses.map((m) => {
619+
responses={committedResponses.map((m) => {
547620
// Use instanceId if set, otherwise fall back to model for backwards compat
548621
const instanceId = m.instanceId ?? m.model ?? "unknown";
549622
return {
@@ -560,6 +633,7 @@ export function ChatMessageList({
560633
citations: m.citations,
561634
artifacts: m.artifacts,
562635
toolExecutionRounds: m.toolExecutionRounds,
636+
completedRounds: m.completedRounds,
563637
debugMessageId: m.debugMessageId,
564638
};
565639
})}
@@ -587,92 +661,6 @@ export function ChatMessageList({
587661
</div>
588662
);
589663
})}
590-
591-
{/*
592-
STREAMING SECTION - Outside Virtualization
593-
594-
Active streaming responses render here, positioned absolutely at the bottom.
595-
This is intentionally outside the virtualized list because:
596-
1. Streaming content height changes constantly (every token)
597-
2. Virtualization re-measures heights, which would cause jank
598-
3. The streaming section should always be visible (no virtualization cutoff)
599-
600-
The key={streamingSessionIdRef.current} ensures animation only plays once
601-
per streaming session, not on every content update.
602-
*/}
603-
{/* Show streaming section when we have streaming responses */}
604-
{hasStreamingResponses && (
605-
<div
606-
className="absolute left-0 right-0"
607-
style={{
608-
// Use virtualizer total size, with fallback to estimated size for unmeasured groups
609-
transform: `translateY(${Math.max(virtualizer.getTotalSize(), messageGroups.length * 200)}px)`,
610-
}}
611-
>
612-
{/* Routing decision indicator for routed mode */}
613-
<RoutingDecision />
614-
{/* Chain progress indicator for chained mode */}
615-
<ChainProgress
616-
models={selectedModels.filter((m) => !disabledModels.includes(m))}
617-
/>
618-
{/* Synthesis progress indicator for synthesized mode */}
619-
<SynthesisProgress
620-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
621-
/>
622-
{/* Refinement progress indicator for refined mode */}
623-
<RefinementProgress />
624-
{/* Critique progress indicator for critiqued mode */}
625-
<CritiqueProgress />
626-
{/* Election progress indicator for elected mode */}
627-
<ElectedProgress
628-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
629-
/>
630-
{/* Tournament progress indicator for tournament mode */}
631-
<TournamentProgress
632-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
633-
/>
634-
{/* Consensus progress indicator for consensus mode */}
635-
<ConsensusProgress
636-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
637-
/>
638-
{/* Debate progress indicator for debated mode */}
639-
<DebateProgress
640-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
641-
/>
642-
{/* Council progress indicator for council mode */}
643-
<CouncilProgress
644-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
645-
/>
646-
{/* Hierarchical progress indicator for hierarchical mode */}
647-
<HierarchicalProgress />
648-
{/* Scattershot progress indicator for scattershot mode */}
649-
<ScattershotProgress />
650-
{/* Explainer progress indicator for explainer mode */}
651-
<ExplainerProgress />
652-
{/* Confidence-weighted progress indicator for confidence-weighted mode */}
653-
<ConfidenceProgress
654-
allModels={selectedModels.filter((m) => !disabledModels.includes(m))}
655-
/>
656-
{/* Key ensures animation only plays once per streaming session */}
657-
{hasStreamingResponses && (
658-
<div key={streamingSessionIdRef.current} className="animate-slide-up-bounce">
659-
<MultiModelResponse
660-
responses={filteredModelResponses.map((r) => {
661-
// Use instanceId if set, otherwise fall back to model
662-
const instanceId = r.instanceId ?? r.model;
663-
return {
664-
...r,
665-
instanceId,
666-
label: instanceLabels.get(instanceId),
667-
};
668-
})}
669-
timestamp={streamingTimestampRef.current}
670-
actionConfig={actionConfig}
671-
/>
672-
</div>
673-
)}
674-
</div>
675-
)}
676664
</div>
677665
)}
678666
</div>

ui/src/components/ChatView/ChatView.stories.tsx

Lines changed: 21 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -100,23 +100,27 @@ const meta: Meta<typeof ChatView> = {
100100
},
101101
},
102102
decorators: [
103-
(Story) => (
104-
<QueryClientProvider client={queryClient}>
105-
<ConfigProvider>
106-
<AuthProvider>
107-
<PreferencesProvider>
108-
<ToastProvider>
109-
<TooltipProvider>
110-
<div className="h-screen">
111-
<Story />
112-
</div>
113-
</TooltipProvider>
114-
</ToastProvider>
115-
</PreferencesProvider>
116-
</AuthProvider>
117-
</ConfigProvider>
118-
</QueryClientProvider>
119-
),
103+
(Story) => {
104+
// Show reasoning & tools in tests
105+
useChatUIStore.setState({ compactMode: false });
106+
return (
107+
<QueryClientProvider client={queryClient}>
108+
<ConfigProvider>
109+
<AuthProvider>
110+
<PreferencesProvider>
111+
<ToastProvider>
112+
<TooltipProvider>
113+
<div className="h-screen">
114+
<Story />
115+
</div>
116+
</TooltipProvider>
117+
</ToastProvider>
118+
</PreferencesProvider>
119+
</AuthProvider>
120+
</ConfigProvider>
121+
</QueryClientProvider>
122+
);
123+
},
120124
],
121125
};
122126

0 commit comments

Comments (0)