Skip to content

Commit c9ff513

Browse files
authored
Merge pull request #1098 from objectstack-ai/claude/improve-ai-chat-thinking-phase
Fix AI Chat Reasoning Display with Vercel AI SDK Data Stream Protocol
2 parents 83fb82e + cb04ed8 commit c9ff513

File tree

3 files changed

+244
-4
lines changed

3 files changed

+244
-4
lines changed

apps/studio/src/components/AiChatPanel.tsx

Lines changed: 173 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ import type { UIMessage } from 'ai';
77
import {
88
Bot, X, Send, Trash2, Sparkles,
99
Wrench, CheckCircle2, XCircle, Loader2, ShieldAlert,
10+
ChevronDown, ChevronRight, Brain, Zap,
1011
} from 'lucide-react';
1112
import { Button } from '@/components/ui/button';
1213
import { ScrollArea } from '@/components/ui/scroll-area';
@@ -33,6 +34,15 @@ interface AgentSummary {
3334
role: string;
3435
}
3536

37+
/**
38+
* Track active thinking/reasoning state during streaming.
39+
*/
40+
interface ThinkingState {
41+
reasoning: string[];
42+
activeSteps: Map<string, { stepName: string; startedAt: number }>;
43+
completedSteps: string[];
44+
}
45+
3646
/**
3747
* Extract the text content from a UIMessage's parts array.
3848
*/
@@ -160,6 +170,88 @@ function useAgentList(baseUrl: string) {
160170

161171
// ── Tool Invocation State Labels ────────────────────────────────────
162172

173+
/**
174+
* Display reasoning/thinking information in a collapsible section.
175+
*/
176+
interface ReasoningDisplayProps {
177+
reasoning: string[];
178+
}
179+
180+
function ReasoningDisplay({ reasoning }: ReasoningDisplayProps) {
181+
const [isExpanded, setIsExpanded] = useState(false);
182+
183+
if (reasoning.length === 0) return null;
184+
185+
return (
186+
<div
187+
data-testid="reasoning-display"
188+
className="flex flex-col gap-1 rounded-md border border-border/30 bg-muted/30 px-2.5 py-2 text-xs"
189+
>
190+
<button
191+
onClick={() => setIsExpanded(!isExpanded)}
192+
className="flex items-center gap-1.5 text-left text-muted-foreground hover:text-foreground transition-colors"
193+
>
194+
{isExpanded ? (
195+
<ChevronDown className="h-3 w-3 shrink-0" />
196+
) : (
197+
<ChevronRight className="h-3 w-3 shrink-0" />
198+
)}
199+
<Brain className="h-3 w-3 shrink-0" />
200+
<span className="font-medium">Thinking</span>
201+
<span className="text-[10px] opacity-60">
202+
({reasoning.length} step{reasoning.length !== 1 ? 's' : ''})
203+
</span>
204+
</button>
205+
{isExpanded && (
206+
<div className="mt-1 space-y-1 pl-5 text-muted-foreground italic border-l-2 border-border/30">
207+
{reasoning.map((step, idx) => (
208+
<p key={idx} className="text-[11px] leading-relaxed">
209+
{step}
210+
</p>
211+
))}
212+
</div>
213+
)}
214+
</div>
215+
);
216+
}
217+
218+
/**
219+
* Display active step progress indicators.
220+
*/
221+
interface StepProgressProps {
222+
activeSteps: Map<string, { stepName: string; startedAt: number }>;
223+
completedSteps: string[];
224+
}
225+
226+
function StepProgress({ activeSteps, completedSteps }: StepProgressProps) {
227+
if (activeSteps.size === 0) return null;
228+
229+
const totalSteps = completedSteps.length + activeSteps.size;
230+
const currentStep = completedSteps.length + 1;
231+
232+
return (
233+
<div
234+
data-testid="step-progress"
235+
className="flex flex-col gap-1.5 rounded-md border border-blue-500/30 bg-blue-500/5 px-2.5 py-2 text-xs"
236+
>
237+
<div className="flex items-center gap-2">
238+
<Zap className="h-3 w-3 shrink-0 text-blue-600 dark:text-blue-400" />
239+
<span className="font-medium text-blue-700 dark:text-blue-300">
240+
Step {currentStep} of {totalSteps}
241+
</span>
242+
</div>
243+
{Array.from(activeSteps.values()).map((step, idx) => (
244+
<div key={idx} className="flex items-center gap-2 pl-5">
245+
<Loader2 className="h-3 w-3 shrink-0 animate-spin text-blue-600 dark:text-blue-400" />
246+
<span className="text-blue-700 dark:text-blue-300">{step.stepName}</span>
247+
</div>
248+
))}
249+
</div>
250+
);
251+
}
252+
253+
// ── Tool Invocation State Labels ────────────────────────────────────
254+
163255
interface ToolInvocationDisplayProps {
164256
part: Extract<UIMessage['parts'][number], { type: 'dynamic-tool' }>;
165257
onApprove?: (approvalId: string) => void;
@@ -175,6 +267,21 @@ function ToolInvocationDisplay({ part, onApprove, onDeny }: ToolInvocationDispla
175267

176268
switch (part.state) {
177269
case 'input-streaming':
270+
return (
271+
<div
272+
data-testid="tool-invocation-planning"
273+
className="flex items-start gap-2 rounded-md border border-blue-500/40 bg-blue-500/10 px-2.5 py-2 text-xs"
274+
>
275+
<Loader2 className="mt-0.5 h-3.5 w-3.5 shrink-0 animate-spin text-blue-600 dark:text-blue-400" />
276+
<div className="min-w-0">
277+
<span className="font-medium text-blue-700 dark:text-blue-300">Planning to call {toolLabel}</span>
278+
{argsText && (
279+
<p className="mt-0.5 truncate text-blue-600/80 dark:text-blue-300/80">{argsText}</p>
280+
)}
281+
</div>
282+
</div>
283+
);
284+
178285
case 'input-available':
179286
return (
180287
<div
@@ -289,6 +396,11 @@ export function AiChatPanel() {
289396
const { isOpen, setOpen, toggle } = useAiChatPanel();
290397
const [input, setInput] = useState('');
291398
const [selectedAgent, setSelectedAgent] = useState<string>(loadSelectedAgent);
399+
const [thinkingState, setThinkingState] = useState<ThinkingState>({
400+
reasoning: [],
401+
activeSteps: new Map(),
402+
completedSteps: [],
403+
});
292404
const scrollRef = useRef<HTMLDivElement>(null);
293405
const inputRef = useRef<HTMLTextAreaElement>(null);
294406
const baseUrl = getApiBaseUrl();
@@ -316,10 +428,47 @@ export function AiChatPanel() {
316428
const { messages, sendMessage, setMessages, status, error, addToolApprovalResponse } = useChat({
317429
transport,
318430
messages: initialMessages,
431+
onFinish: () => {
432+
// Reset thinking state when stream completes
433+
setThinkingState({
434+
reasoning: [],
435+
activeSteps: new Map(),
436+
completedSteps: [],
437+
});
438+
},
319439
});
320440

321441
const isStreaming = status === 'streaming' || status === 'submitted';
322442

443+
// Extract reasoning and step progress from the latest assistant message parts
444+
useEffect(() => {
445+
if (!isStreaming || messages.length === 0) return;
446+
447+
// Get the latest message
448+
const lastMessage = messages[messages.length - 1];
449+
if (lastMessage.role !== 'assistant') return;
450+
451+
// Process message parts for reasoning and steps
452+
const reasoning: string[] = [];
453+
const activeSteps = new Map<string, { stepName: string; startedAt: number }>();
454+
const completedSteps: string[] = [];
455+
456+
(lastMessage.parts || []).forEach((part: any) => {
457+
if (part.type === 'reasoning-delta' || part.type === 'reasoning') {
458+
reasoning.push(part.text);
459+
} else if (part.type === 'step-start') {
460+
activeSteps.set(part.stepId, {
461+
stepName: part.stepName,
462+
startedAt: Date.now(),
463+
});
464+
} else if (part.type === 'step-finish') {
465+
completedSteps.push(part.stepName);
466+
}
467+
});
468+
469+
setThinkingState({ reasoning, activeSteps, completedSteps });
470+
}, [messages, isStreaming]);
471+
323472
// Persist messages to localStorage whenever they change
324473
useEffect(() => {
325474
if (messages.length > 0) {
@@ -513,10 +662,30 @@ export function AiChatPanel() {
513662
);
514663
})}
515664
{isStreaming && (
516-
<div className="mr-8 flex items-center gap-2 rounded-lg bg-muted px-3 py-2 text-sm text-muted-foreground">
517-
<span className="inline-block h-2 w-2 animate-pulse rounded-full bg-primary" />
518-
Thinking…
519-
</div>
665+
<>
666+
{/* Show reasoning if available */}
667+
{thinkingState.reasoning.length > 0 && (
668+
<div className="mr-8">
669+
<ReasoningDisplay reasoning={thinkingState.reasoning} />
670+
</div>
671+
)}
672+
{/* Show step progress if available */}
673+
{thinkingState.activeSteps.size > 0 && (
674+
<div className="mr-8">
675+
<StepProgress
676+
activeSteps={thinkingState.activeSteps}
677+
completedSteps={thinkingState.completedSteps}
678+
/>
679+
</div>
680+
)}
681+
{/* Default thinking indicator when no detailed state available */}
682+
{thinkingState.reasoning.length === 0 && thinkingState.activeSteps.size === 0 && (
683+
<div className="mr-8 flex items-center gap-2 rounded-lg bg-muted px-3 py-2 text-sm text-muted-foreground">
684+
<span className="inline-block h-2 w-2 animate-pulse rounded-full bg-primary" />
685+
Thinking…
686+
</div>
687+
)}
688+
</>
520689
)}
521690
{error && (
522691
<div className="flex items-start gap-2 rounded-lg border border-destructive/50 bg-destructive/10 px-3 py-2 text-sm text-destructive">

packages/services/service-ai/src/__tests__/vercel-stream-encoder.test.ts

Lines changed: 47 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -128,6 +128,53 @@ describe('encodeStreamPart', () => {
128128
const part = { type: 'unknown-internal' } as unknown as TextStreamPart<ToolSet>;
129129
expect(encodeStreamPart(part)).toBe('');
130130
});
131+
132+
it('should encode reasoning-start part with g: prefix', () => {
133+
const part = {
134+
type: 'reasoning-start',
135+
id: 'r1',
136+
} as unknown as TextStreamPart<ToolSet>;
137+
138+
const frame = encodeStreamPart(part);
139+
expect(frame).toBe('g:{"text":""}\n');
140+
});
141+
142+
it('should encode reasoning-delta part with g: prefix', () => {
143+
const part = {
144+
type: 'reasoning-delta',
145+
id: 'r1',
146+
text: 'Let me think through this step by step...',
147+
} as unknown as TextStreamPart<ToolSet>;
148+
149+
const frame = encodeStreamPart(part);
150+
expect(frame).toBe('g:{"text":"Let me think through this step by step..."}\n');
151+
});
152+
153+
it('should encode reasoning-end part as empty (no specific end marker)', () => {
154+
const part = {
155+
type: 'reasoning-end',
156+
id: 'r1',
157+
} as unknown as TextStreamPart<ToolSet>;
158+
159+
const frame = encodeStreamPart(part);
160+
expect(frame).toBe('');
161+
});
162+
163+
it('should pass through custom step events', () => {
164+
const part = {
165+
type: 'step-start',
166+
stepId: 'step_1',
167+
stepName: 'Query database',
168+
} as unknown as TextStreamPart<ToolSet>;
169+
170+
const frame = encodeStreamPart(part);
171+
const payload = parseSSE(frame);
172+
expect(payload).toEqual({
173+
type: 'step-start',
174+
stepId: 'step_1',
175+
stepName: 'Query database',
176+
});
177+
});
131178
});
132179

133180
// ─────────────────────────────────────────────────────────────────

packages/services/service-ai/src/stream/vercel-stream-encoder.ts

Lines changed: 24 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,14 @@ function sse(data: object): string {
2323
return `data: ${JSON.stringify(data)}\n\n`;
2424
}
2525

26+
/**
27+
* Encode data using Vercel AI SDK Data Stream Protocol prefixes.
28+
* @see https://ai-sdk.dev/docs/ai-sdk-ui/stream-protocol
29+
*/
30+
function dataStreamLine(prefix: string, data: object): string {
31+
return `${prefix}:${JSON.stringify(data)}\n`;
32+
}
33+
2634
// ── Public API ──────────────────────────────────────────────────────
2735

2836
/**
@@ -71,8 +79,24 @@ export function encodeStreamPart(part: TextStreamPart<ToolSet>): string {
7179
errorText: String(part.error),
7280
});
7381

82+
// Handle reasoning/thinking streams (DeepSeek R1, o1-style models)
83+
// Use 'g:' prefix for reasoning content per Vercel AI SDK protocol
84+
case 'reasoning-start':
85+
return dataStreamLine('g', { text: '' });
86+
87+
case 'reasoning-delta':
88+
return dataStreamLine('g', { text: part.text });
89+
90+
case 'reasoning-end':
91+
return ''; // No specific end marker needed for reasoning
92+
7493
// finish-step and finish are handled by the generator, not here
7594
default:
95+
// Pass through any unknown event types that might be custom
96+
// (e.g., step-start, step-finish from custom providers)
97+
if ((part as any).type?.startsWith('step-')) {
98+
return sse(part as any);
99+
}
76100
return '';
77101
}
78102
}

0 commit comments

Comments (0)