diff --git a/CHANGELOG.md b/CHANGELOG.md
index 6bb46fd81..ce3c100f0 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -8,6 +8,14 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
## [Unreleased]
### Fixed
+- **Agent Chat: Vercel SSE Data Stream support** — The agent chat endpoint
+ (`/api/v1/ai/agents/:agentName/chat`) now returns Vercel AI SDK v6 UI Message Stream Protocol
+ (SSE) by default, matching the general chat endpoint behaviour. Previously, the agent chat route
+ only returned plain JSON, causing `DefaultChatTransport` (used by `@ai-sdk/react` `useChat`) to
+ fail silently — the API responded correctly but the Studio AI Chat Panel rendered no content.
+ The endpoint now uses `streamChatWithTools` + `encodeVercelDataStream` for `stream !== false`
+ requests (the default), and falls back to JSON only when `stream: false` is explicitly set.
+ Studio's error UI is also enhanced to surface SSE parse failures clearly instead of silent failure.
- **Agent Chat: Vercel AI SDK v6 `parts` format support** — The agent chat endpoint
(`/api/v1/ai/agents/:agentName/chat`) now accepts Vercel AI SDK v6 `parts`-based message
format in addition to the legacy `content` string format. Previously, sending messages
diff --git a/apps/studio/src/components/AiChatPanel.tsx b/apps/studio/src/components/AiChatPanel.tsx
index ced84f16d..b69a68721 100644
--- a/apps/studio/src/components/AiChatPanel.tsx
+++ b/apps/studio/src/components/AiChatPanel.tsx
@@ -519,8 +519,20 @@ export function AiChatPanel() {
)}
{error && (
-
- Error: {error.message || 'Something went wrong'}
+
+
+
+
Chat Error
+
+ {error.message || 'Something went wrong'}
+
+ {error.message && /unexpected|json|parse|stream/i.test(error.message) && (
+
+ The server may not be returning the expected Vercel AI Data Stream format.
+ Ensure the backend endpoint supports SSE streaming.
+
+ )}
+
)}
diff --git a/packages/services/service-ai/src/__tests__/chatbot-features.test.ts b/packages/services/service-ai/src/__tests__/chatbot-features.test.ts
index 7566608c7..ff12501dd 100644
--- a/packages/services/service-ai/src/__tests__/chatbot-features.test.ts
+++ b/packages/services/service-ai/src/__tests__/chatbot-features.test.ts
@@ -807,13 +807,14 @@ describe('Agent Routes', () => {
expect((resp.body as any).error).toContain('not active');
});
- it('should return 200 with agent response for valid request', async () => {
+ it('should return 200 with agent response for valid request (stream=false)', async () => {
const chatRoute = routes.find(r => r.method === 'POST')!;
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
messages: [{ role: 'user', content: 'List all tables' }],
context: { objectName: 'account' },
+ stream: false,
},
});
expect(resp.status).toBe(200);
@@ -862,6 +863,7 @@ describe('Agent Routes', () => {
params: { agentName: 'data_chat' },
body: {
messages: [{ role: 'user', content: 'test' }],
+ stream: false,
options: {
tools: [{ name: 'injected_tool', description: 'Evil', parameters: {} }],
toolChoice: 'injected_tool',
@@ -882,6 +884,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
+ stream: false,
messages: [
{
role: 'user',
@@ -899,6 +902,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
+ stream: false,
messages: [
{ role: 'user', content: 'Hello' },
{
@@ -920,6 +924,7 @@ describe('Agent Routes', () => {
const resp = await chatRoute.handler({
params: { agentName: 'data_chat' },
body: {
+ stream: false,
messages: [
{
role: 'assistant',
@@ -946,6 +951,66 @@ describe('Agent Routes', () => {
expect(resp.status).toBe(400);
expect((resp.body as any).error).toContain('content');
});
+
+ // ── Vercel Data Stream Protocol (SSE) ──
+
+ it('should default to Vercel Data Stream mode when stream is not specified', async () => {
+ const chatRoute = routes.find(r => r.method === 'POST')!;
+ const resp = await chatRoute.handler({
+ params: { agentName: 'data_chat' },
+ body: {
+ messages: [{ role: 'user', content: 'List all tables' }],
+ },
+ });
+ expect(resp.status).toBe(200);
+ expect(resp.stream).toBe(true);
+ expect(resp.vercelDataStream).toBe(true);
+ expect(resp.events).toBeDefined();
+
+ // Consume the Vercel Data Stream events
+ const events: unknown[] = [];
+ for await (const event of resp.events!) {
+ events.push(event);
+ }
+ expect(events.length).toBeGreaterThan(0);
+ // Must contain standard SSE lifecycle events
+ const eventsStr = events.join('');
+ expect(eventsStr).toContain('"type":"start"');
+ expect(eventsStr).toContain('"type":"text-delta"');
+ expect(eventsStr).toContain('"type":"finish"');
+ expect(eventsStr).toContain('data: [DONE]');
+ });
+
+ it('should return Vercel Data Stream when stream=true explicitly', async () => {
+ const chatRoute = routes.find(r => r.method === 'POST')!;
+ const resp = await chatRoute.handler({
+ params: { agentName: 'data_chat' },
+ body: {
+ messages: [{ role: 'user', content: 'Hello agent' }],
+ stream: true,
+ },
+ });
+ expect(resp.status).toBe(200);
+ expect(resp.stream).toBe(true);
+ expect(resp.vercelDataStream).toBe(true);
+ expect(resp.events).toBeDefined();
+ });
+
+ it('should return JSON when stream=false', async () => {
+ const chatRoute = routes.find(r => r.method === 'POST')!;
+ const resp = await chatRoute.handler({
+ params: { agentName: 'data_chat' },
+ body: {
+ messages: [{ role: 'user', content: 'Hello agent' }],
+ stream: false,
+ },
+ });
+ expect(resp.status).toBe(200);
+ expect(resp.stream).toBeUndefined();
+ expect(resp.vercelDataStream).toBeUndefined();
+ expect(resp.body).toBeDefined();
+ expect((resp.body as any).content).toBeDefined();
+ });
});
// ═══════════════════════════════════════════════════════════════════
diff --git a/packages/services/service-ai/src/routes/agent-routes.ts b/packages/services/service-ai/src/routes/agent-routes.ts
index a5277d562..5d07c705b 100644
--- a/packages/services/service-ai/src/routes/agent-routes.ts
+++ b/packages/services/service-ai/src/routes/agent-routes.ts
@@ -6,6 +6,7 @@ import type { AIService } from '../ai-service.js';
import type { AgentRuntime, AgentChatContext } from '../agent-runtime.js';
import type { RouteDefinition } from './ai-routes.js';
import { normalizeMessage, validateMessageContent } from './message-utils.js';
+import { encodeVercelDataStream } from '../stream/vercel-stream-encoder.js';
/**
* Allowed message roles for the agent chat endpoint.
@@ -68,10 +69,15 @@ export function buildAgentRoutes(
},
// ── Chat with a specific agent ──────────────────────────────
+ //
+ // Dual-mode endpoint matching the general chat route behaviour:
+ // • `stream !== false` → Vercel Data Stream Protocol (SSE)
+ // • `stream === false` → JSON response (legacy)
+ //
{
method: 'POST',
path: '/api/v1/ai/agents/:agentName/chat',
- description: 'Chat with a specific AI agent',
+ description: 'Chat with a specific AI agent (supports Vercel AI Data Stream Protocol)',
auth: true,
permissions: ['ai:chat', 'ai:agents'],
handler: async (req) => {
@@ -81,11 +87,12 @@ export function buildAgentRoutes(
}
// Parse request body
+ const body = (req.body ?? {}) as Record<string, unknown>;
const {
messages: rawMessages,
context: chatContext,
options: extraOptions,
- } = (req.body ?? {}) as {
+ } = body as {
messages?: unknown[];
context?: AgentChatContext;
options?: Record<string, unknown>;
@@ -138,12 +145,37 @@ export function buildAgentRoutes(
...rawMessages.map(m => normalizeMessage(m as Record<string, unknown>)),
];
- // Use chatWithTools for automatic tool resolution
- const result = await aiService.chatWithTools(fullMessages, {
+ const chatWithToolsOptions = {
...mergedOptions,
maxIterations: agent.planning?.maxIterations,
- });
+ };
+ // ── Choose response mode ─────────────────────────────
+ const wantStream = body.stream !== false;
+
+ if (wantStream) {
+ // Vercel Data Stream Protocol (SSE) — matches general chat behaviour
+ if (!aiService.streamChatWithTools) {
+ return { status: 501, body: { error: 'Streaming is not supported by the configured AI service' } };
+ }
+ const events = aiService.streamChatWithTools(fullMessages, chatWithToolsOptions);
+ return {
+ status: 200,
+ stream: true,
+ vercelDataStream: true,
+ contentType: 'text/event-stream',
+ headers: {
+ 'Content-Type': 'text/event-stream',
+ 'Cache-Control': 'no-cache',
+ 'Connection': 'keep-alive',
+ 'x-vercel-ai-ui-message-stream': 'v1',
+ },
+ events: encodeVercelDataStream(events),
+ };
+ }
+
+ // JSON response (non-streaming / legacy)
+ const result = await aiService.chatWithTools(fullMessages, chatWithToolsOptions);
return { status: 200, body: result };
} catch (err) {
logger.error(