Skip to content

Commit aff2159

Browse files
Copilot and hotlong authored
Address all review comments: validate array content shape, validate systemPrompt type, forward all AIRequestOptions to Vercel adapter
- validateMessage: structural validation for array content elements (non-null objects with string type, text property for type:text)
- systemPrompt: runtime string validation, returns 400 for non-string values
- VercelLLMAdapter: buildVercelOptions helper forwards stop, tools, toolChoice to chat/complete/streamChat

Agent-Logs-Url: https://github.com/objectstack-ai/spec/sessions/ac262d54-6f0d-47a1-967b-c1f4b7636378
Co-authored-by: hotlong <50353452+hotlong@users.noreply.github.com>
1 parent 50ec638 commit aff2159

2 files changed

Lines changed: 63 additions & 12 deletions

File tree

packages/services/service-ai/src/adapters/vercel-adapter.ts

Lines changed: 38 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -8,8 +8,42 @@ import type {
88
ToolSet,
99
} from '@objectstack/spec/contracts';
1010
import type { LLMAdapter } from '@objectstack/spec/contracts';
11+
import type { AIToolDefinition } from '@objectstack/spec/contracts';
1112
import type { LanguageModelV3 } from '@ai-sdk/provider';
12-
import { generateText, streamText } from 'ai';
13+
import { generateText, streamText, tool as vercelTool, jsonSchema } from 'ai';
14+
15+
/**
16+
* Convert ObjectStack `AIRequestOptions` into the subset of Vercel AI SDK
17+
* options supported by `generateText` / `streamText`.
18+
*
19+
* Forwards: temperature, maxTokens, stop (→ stopSequences), tools, toolChoice.
20+
*/
21+
function buildVercelOptions(options?: AIRequestOptions): Record<string, unknown> {
22+
if (!options) return {};
23+
24+
const opts: Record<string, unknown> = {};
25+
26+
if (options.temperature != null) opts.temperature = options.temperature;
27+
if (options.maxTokens != null) opts.maxTokens = options.maxTokens;
28+
if (options.stop?.length) opts.stopSequences = options.stop;
29+
30+
if (options.tools?.length) {
31+
const tools: Record<string, unknown> = {};
32+
for (const t of options.tools as AIToolDefinition[]) {
33+
tools[t.name] = vercelTool({
34+
description: t.description,
35+
inputSchema: jsonSchema(t.parameters as any),
36+
});
37+
}
38+
opts.tools = tools;
39+
}
40+
41+
if (options.toolChoice != null) {
42+
opts.toolChoice = options.toolChoice;
43+
}
44+
45+
return opts;
46+
}
1347

1448
/**
1549
* VercelLLMAdapter — Production LLM adapter powered by the Vercel AI SDK.
@@ -39,8 +73,7 @@ export class VercelLLMAdapter implements LLMAdapter {
3973
const result = await generateText({
4074
model: this.model,
4175
messages,
42-
temperature: options?.temperature,
43-
maxTokens: options?.maxTokens,
76+
...buildVercelOptions(options),
4477
});
4578

4679
return {
@@ -59,8 +92,7 @@ export class VercelLLMAdapter implements LLMAdapter {
5992
const result = await generateText({
6093
model: this.model,
6194
prompt,
62-
temperature: options?.temperature,
63-
maxTokens: options?.maxTokens,
95+
...buildVercelOptions(options),
6496
});
6597

6698
return {
@@ -81,8 +113,7 @@ export class VercelLLMAdapter implements LLMAdapter {
81113
const result = streamText({
82114
model: this.model,
83115
messages,
84-
temperature: options?.temperature,
85-
maxTokens: options?.maxTokens,
116+
...buildVercelOptions(options),
86117
});
87118

88119
for await (const part of result.fullStream) {

packages/services/service-ai/src/routes/ai-routes.ts

Lines changed: 25 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -91,11 +91,27 @@ function validateMessage(raw: unknown): string | null {
9191
if (typeof msg.role !== 'string' || !VALID_ROLES.has(msg.role)) {
9292
return `message.role must be one of ${[...VALID_ROLES].map(r => `"${r}"`).join(', ')}`;
9393
}
94-
// Accept string content (legacy) or array content (Vercel multi-part)
95-
if (typeof msg.content !== 'string' && !Array.isArray(msg.content)) {
96-
return 'message.content must be a string or an array';
94+
const content = msg.content;
95+
if (typeof content === 'string') {
96+
return null;
9797
}
98-
return null;
98+
if (Array.isArray(content)) {
99+
const parts = content as unknown[];
100+
for (const part of parts) {
101+
if (typeof part !== 'object' || part === null) {
102+
return 'message.content array elements must be non-null objects';
103+
}
104+
const partObj = part as Record<string, unknown>;
105+
if (typeof partObj.type !== 'string') {
106+
return 'each message.content array element must have a string "type" property';
107+
}
108+
if (partObj.type === 'text' && typeof partObj.text !== 'string') {
109+
return 'message.content elements with type "text" must have a string "text" property';
110+
}
111+
}
112+
return null;
113+
}
114+
return 'message.content must be a string or an array';
99115
}
100116

101117
/**
@@ -167,7 +183,11 @@ export function buildAIRoutes(
167183
// ── Prepend system prompt ────────────────────────────
168184
// Vercel useChat sends `system` (or the deprecated `systemPrompt`)
169185
// as a top-level field. We prepend it as a system message.
170-
const systemPrompt = (body.system ?? body.systemPrompt) as string | undefined;
186+
const rawSystemPrompt = body.system ?? body.systemPrompt;
187+
if (rawSystemPrompt != null && typeof rawSystemPrompt !== 'string') {
188+
return { status: 400, body: { error: 'system/systemPrompt must be a string' } };
189+
}
190+
const systemPrompt = rawSystemPrompt as string | undefined;
171191
const finalMessages: ModelMessage[] = [
172192
...(systemPrompt
173193
? [{ role: 'system' as const, content: systemPrompt }]

0 commit comments

Comments (0)