Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]

### Fixed
- **Agent Chat: Vercel SSE Data Stream support** — The agent chat endpoint
(`/api/v1/ai/agents/:agentName/chat`) now returns Vercel AI SDK v6 UI Message Stream Protocol
(SSE) by default, matching the general chat endpoint behaviour. Previously, the agent chat route
only returned plain JSON, causing `DefaultChatTransport` (used by `@ai-sdk/react` `useChat`) to
fail silently — the API responded correctly but the Studio AI Chat Panel rendered no content.
The endpoint now uses `streamChatWithTools` + `encodeVercelDataStream` for `stream !== false`
requests (the default), and falls back to JSON only when `stream: false` is explicitly set.
  Studio's error UI is also enhanced to surface SSE parse failures clearly instead of failing silently.
- **Agent Chat: Vercel AI SDK v6 `parts` format support** — The agent chat endpoint
(`/api/v1/ai/agents/:agentName/chat`) now accepts Vercel AI SDK v6 `parts`-based message
format in addition to the legacy `content` string format. Previously, sending messages
Expand Down
16 changes: 14 additions & 2 deletions apps/studio/src/components/AiChatPanel.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -519,8 +519,20 @@ export function AiChatPanel() {
</div>
)}
{error && (
<div className="rounded-lg border border-destructive/50 bg-destructive/10 px-3 py-2 text-sm text-destructive">
Error: {error.message || 'Something went wrong'}
<div className="flex items-start gap-2 rounded-lg border border-destructive/50 bg-destructive/10 px-3 py-2 text-sm text-destructive">
<ShieldAlert className="mt-0.5 h-4 w-4 shrink-0" />
<div>
<p className="font-medium">Chat Error</p>
<p className="mt-0.5 text-xs opacity-80">
{error.message || 'Something went wrong'}
</p>
{error.message && /unexpected|json|parse|stream/i.test(error.message) && (
<p className="mt-1 text-xs opacity-70">
The server may not be returning the expected Vercel AI Data Stream format.
Ensure the backend endpoint supports SSE streaming.
</p>
)}
</div>
</div>
)}
</div>
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -807,13 +807,14 @@ describe('Agent Routes', () => {
expect((resp.body as any).error).toContain('not active');
});

it('should return 200 with agent response for valid request', async () => {
it('should return 200 with agent response for valid request (stream=false)', async () => {
const chatRoute = routes.find(r => r.method === 'POST')!;
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
messages: [{ role: 'user', content: 'List all tables' }],
context: { objectName: 'account' },
stream: false,
},
});
expect(resp.status).toBe(200);
Expand Down Expand Up @@ -862,6 +863,7 @@ describe('Agent Routes', () => {
params: { agentName: 'data_chat' },
body: {
messages: [{ role: 'user', content: 'test' }],
stream: false,
options: {
tools: [{ name: 'injected_tool', description: 'Evil', parameters: {} }],
toolChoice: 'injected_tool',
Expand All @@ -882,6 +884,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
stream: false,
messages: [
{
role: 'user',
Expand All @@ -899,6 +902,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
stream: false,
messages: [
{ role: 'user', content: 'Hello' },
{
Expand All @@ -920,6 +924,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
stream: false,
messages: [
{
role: 'assistant',
Expand All @@ -946,6 +951,66 @@ describe('Agent Routes', () => {
expect(resp.status).toBe(400);
expect((resp.body as any).error).toContain('content');
});

// ── Vercel AI SDK v6 UI Message Stream Protocol (SSE) ──

// Default mode (no `stream` flag in the body): the handler must emit the SSE stream.
it('should default to Vercel Data Stream mode when stream is not specified', async () => {
  const postRoute = routes.find(r => r.method === 'POST')!;
  const response = await postRoute.handler({
    params: { agentName: 'data_chat' },
    body: {
      messages: [{ role: 'user', content: 'List all tables' }],
    },
  });

  expect(response.status).toBe(200);
  expect(response.stream).toBe(true);
  expect(response.vercelDataStream).toBe(true);
  expect(response.events).toBeDefined();

  // Drain the async iterator so every emitted frame can be inspected.
  const frames: unknown[] = [];
  for await (const frame of response.events!) {
    frames.push(frame);
  }
  expect(frames.length).toBeGreaterThan(0);

  // The concatenated payload must carry the standard SSE lifecycle events.
  const payload = frames.join('');
  expect(payload).toContain('"type":"start"');
  expect(payload).toContain('"type":"text-delta"');
  expect(payload).toContain('"type":"finish"');
  expect(payload).toContain('data: [DONE]');
});

it('should return Vercel Data Stream when stream=true explicitly', async () => {
const chatRoute = routes.find(r => r.method === 'POST')!;
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
messages: [{ role: 'user', content: 'Hello agent' }],
stream: true,
},
});
expect(resp.status).toBe(200);
expect(resp.stream).toBe(true);
expect(resp.vercelDataStream).toBe(true);
expect(resp.events).toBeDefined();
});

// Legacy mode: `stream: false` opts out of SSE and yields a plain JSON body.
it('should return JSON when stream=false', async () => {
  const jsonRoute = routes.find(r => r.method === 'POST')!;
  const reply = await jsonRoute.handler({
    params: { agentName: 'data_chat' },
    body: {
      messages: [{ role: 'user', content: 'Hello agent' }],
      stream: false,
    },
  });

  expect(reply.status).toBe(200);
  expect(reply.stream).toBeUndefined();
  expect(reply.vercelDataStream).toBeUndefined();
  expect(reply.body).toBeDefined();
  expect((reply.body as any).content).toBeDefined();
});
});

// ═══════════════════════════════════════════════════════════════════
Expand Down
42 changes: 37 additions & 5 deletions packages/services/service-ai/src/routes/agent-routes.ts
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@ import type { AIService } from '../ai-service.js';
import type { AgentRuntime, AgentChatContext } from '../agent-runtime.js';
import type { RouteDefinition } from './ai-routes.js';
import { normalizeMessage, validateMessageContent } from './message-utils.js';
import { encodeVercelDataStream } from '../stream/vercel-stream-encoder.js';

/**
* Allowed message roles for the agent chat endpoint.
Expand Down Expand Up @@ -68,10 +69,15 @@ export function buildAgentRoutes(
},

// ── Chat with a specific agent ──────────────────────────────
//
// Dual-mode endpoint matching the general chat route behaviour:
// • `stream !== false` → Vercel Data Stream Protocol (SSE)
// • `stream === false` → JSON response (legacy)
//
{
method: 'POST',
path: '/api/v1/ai/agents/:agentName/chat',
description: 'Chat with a specific AI agent',
description: 'Chat with a specific AI agent (supports Vercel AI Data Stream Protocol)',
Comment on lines +72 to +80
Copy link

Copilot AI Apr 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The endpoint comments/description call this the “Vercel Data Stream Protocol (SSE)”, but the encoder being used (stream/vercel-stream-encoder.ts) explicitly implements the Vercel AI SDK v6 UI Message Stream Protocol (SSE data: frames with JSON payloads). Please align the wording here (and/or naming like vercelDataStream) to the actual wire protocol to avoid confusion when debugging transports.

Copilot uses AI. Check for mistakes.
auth: true,
permissions: ['ai:chat', 'ai:agents'],
handler: async (req) => {
Expand All @@ -81,11 +87,12 @@ export function buildAgentRoutes(
}

// Parse request body
const body = (req.body ?? {}) as Record<string, unknown>;
const {
messages: rawMessages,
context: chatContext,
options: extraOptions,
} = (req.body ?? {}) as {
} = body as {
messages?: unknown[];
context?: AgentChatContext;
options?: Record<string, unknown>;
Expand Down Expand Up @@ -138,12 +145,37 @@ export function buildAgentRoutes(
...rawMessages.map(m => normalizeMessage(m as Record<string, unknown>)),
];

// Use chatWithTools for automatic tool resolution
const result = await aiService.chatWithTools(fullMessages, {
const chatWithToolsOptions = {
...mergedOptions,
maxIterations: agent.planning?.maxIterations,
});
};

// ── Choose response mode ─────────────────────────────
const wantStream = body.stream !== false;

if (wantStream) {
// Vercel Data Stream Protocol (SSE) — matches general chat behaviour
if (!aiService.streamChatWithTools) {
return { status: 501, body: { error: 'Streaming is not supported by the configured AI service' } };
}
const events = aiService.streamChatWithTools(fullMessages, chatWithToolsOptions);
return {
status: 200,
stream: true,
vercelDataStream: true,
contentType: 'text/event-stream',
headers: {
'Content-Type': 'text/event-stream',
'Cache-Control': 'no-cache',
'Connection': 'keep-alive',
'x-vercel-ai-ui-message-stream': 'v1',
},
events: encodeVercelDataStream(events),
Comment on lines +162 to +173
Copy link

Copilot AI Apr 8, 2026

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

The streaming response is returning contentType and headers, but RouteResponse (from routes/ai-routes.ts) doesn’t define those fields and the runtime HttpDispatcher.handleAI() streaming path ignores route-provided headers/contentType entirely (it constructs its own headers from vercelDataStream). Either remove these fields here to avoid dead/misleading data, or extend RouteResponse + update HttpDispatcher.handleAI() to propagate streaming headers (e.g., the x-vercel-ai-ui-message-stream header) when present.

Copilot uses AI. Check for mistakes.
};
}

// JSON response (non-streaming / legacy)
const result = await aiService.chatWithTools(fullMessages, chatWithToolsOptions);
return { status: 200, body: result };
} catch (err) {
logger.error(
Expand Down
Loading