
Commit fce3c6f

test: add responses api more cases (#99)
1 parent cc43c0c commit fce3c6f

1 file changed

Lines changed: 339 additions & 0 deletions

File tree

tests/proxy/responses.test.ts

@@ -229,6 +229,345 @@ describe('proxy /v1/responses', () => {
     }
   });
 
+  test('stream response emits the exact text lifecycle with terminal usage and rewritten ids', async () => {
+    const resp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'stream exact lifecycle once',
+        stream: true,
+      },
+      AUTHORIZED_KEY,
+      { responseType: 'text' },
+    );
+
+    expect(resp.status).toBe(200);
+
+    const events = parseResponsesSseEvents(String(resp.data));
+    expect(events.map((event) => event.event)).toEqual([
+      'response.created',
+      'response.in_progress',
+      'response.output_item.added',
+      'response.content_part.added',
+      'response.output_text.delta',
+      'response.output_text.delta',
+      'response.output_text.done',
+      'response.content_part.done',
+      'response.output_item.done',
+      'response.completed',
+    ]);
+
+    const completed = JSON.parse(events.at(-1)?.data ?? '{}') as {
+      response?: {
+        id?: string;
+        usage?: {
+          input_tokens?: number;
+          output_tokens?: number;
+          total_tokens?: number;
+        };
+        output?: Array<{ id?: string; type?: string }>;
+      };
+    };
+
+    expect(completed.response?.id).toMatch(/^aresp_/);
+    expect(completed.response?.usage).toEqual({
+      input_tokens: 10,
+      output_tokens: 8,
+      total_tokens: 18,
+    });
+    expect(completed.response?.output?.[0]?.type).toBe('message');
+    expect(completed.response?.output?.[0]?.id).toBe(
+      `${completed.response?.id}_message_0`,
+    );
+  });
+
+  test('store and metadata survive the first response and replay through previous_response_id', async () => {
+    const firstResp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'hello with metadata',
+        store: true,
+        metadata: {
+          trace_id: 'trace-123',
+        },
+      },
+      AUTHORIZED_KEY,
+    );
+
+    expect(firstResp.status).toBe(200);
+    expect(firstResp.data.metadata.trace_id).toBe('trace-123');
+
+    const secondResp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'follow up with replay',
+        previous_response_id: firstResp.data.id,
+        metadata: {
+          trace_id: 'trace-456',
+        },
+      },
+      AUTHORIZED_KEY,
+    );
+
+    expect(secondResp.status).toBe(200);
+    expect(secondResp.data.previous_response_id).toBe(firstResp.data.id);
+    expect(secondResp.data.metadata.trace_id).toBe('trace-456');
+
+    const recorded = upstream?.takeRecordedRequests() ?? [];
+    expect(recorded).toHaveLength(2);
+
+    const replayBody = recorded[1]?.bodyJson as {
+      messages: Array<{ role: string; content: string }>;
+    };
+
+    expect(replayBody.messages[0]?.content).toBe('hello with metadata');
+    expect(replayBody.messages[1]?.content).toBe('hello from mock upstream');
+    expect(replayBody.messages[2]?.content).toBe('follow up with replay');
+  });
+
+  test('store false prevents previous_response_id replay from being persisted', async () => {
+    const firstResp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'do not store this response',
+        store: false,
+      },
+      AUTHORIZED_KEY,
+    );
+
+    expect(firstResp.status).toBe(200);
+
+    const secondResp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'try replay after store false',
+        previous_response_id: firstResp.data.id,
+      },
+      AUTHORIZED_KEY,
+    );
+
+    expect(secondResp.status).toBe(400);
+    expect(secondResp.data.error.type).toBe('invalid_request_error');
+    expect(secondResp.data.error.message).toContain(
+      'previous_response_not_found',
+    );
+
+    const recorded = upstream?.takeRecordedRequests() ?? [];
+    expect(recorded).toHaveLength(1);
+  });
+
+  test('file-id-only input images are rejected before upstream dispatch on bridge mode', async () => {
+    const resp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: [
+          {
+            type: 'message',
+            role: 'user',
+            content: [
+              { type: 'input_text', text: 'Describe the uploaded file' },
+              {
+                type: 'input_image',
+                file_id: 'file_123',
+                detail: 'high',
+              },
+            ],
+          },
+        ],
+      },
+      AUTHORIZED_KEY,
+    );
+
+    expect(resp.status).toBe(400);
+    expect(resp.data.error.type).toBe('invalid_request_error');
+    expect(resp.data.error.message).toContain('file_id');
+    expect(upstream?.takeRecordedRequests() ?? []).toHaveLength(0);
+  });
+
+  test('stream response preserves mixed text and function call output items', async () => {
+    upstream?.configure({
+      streamEvents: [
+        {
+          id: 'chatcmpl-responses-mixed-e2e-mock',
+          object: 'chat.completion.chunk',
+          created: 1,
+          model: UPSTREAM_MODEL,
+          choices: [
+            {
+              index: 0,
+              delta: { role: 'assistant', content: 'Calling tool' },
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-responses-mixed-e2e-mock',
+          object: 'chat.completion.chunk',
+          created: 1,
+          model: UPSTREAM_MODEL,
+          choices: [
+            {
+              index: 0,
+              delta: {
+                tool_calls: [
+                  {
+                    index: 0,
+                    id: 'call_weather_1',
+                    type: 'function',
+                    function: {
+                      name: 'get_weather',
+                      arguments: '{"city"',
+                    },
+                  },
+                ],
+              },
+              finish_reason: null,
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-responses-mixed-e2e-mock',
+          object: 'chat.completion.chunk',
+          created: 1,
+          model: UPSTREAM_MODEL,
+          choices: [
+            {
+              index: 0,
+              delta: {
+                tool_calls: [
+                  {
+                    index: 0,
+                    function: {
+                      arguments: ':"SF"}',
+                    },
+                  },
+                ],
+              },
+              finish_reason: 'tool_calls',
+            },
+          ],
+        },
+        {
+          id: 'chatcmpl-responses-mixed-e2e-mock',
+          object: 'chat.completion.chunk',
+          created: 1,
+          model: UPSTREAM_MODEL,
+          choices: [],
+          usage: {
+            prompt_tokens: 12,
+            completion_tokens: 7,
+            total_tokens: 19,
+          },
+        },
+        '[DONE]',
+      ],
+    });
+
+    const resp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'stream mixed output items',
+        stream: true,
+      },
+      AUTHORIZED_KEY,
+      { responseType: 'text' },
+    );
+
+    expect(resp.status).toBe(200);
+
+    const events = parseResponsesSseEvents(String(resp.data));
+    expect(
+      events.some((event) => event.event === 'response.output_text.delta'),
+    ).toBe(true);
+    expect(
+      events.some(
+        (event) => event.event === 'response.function_call_arguments.delta',
+      ),
+    ).toBe(true);
+
+    const addedItems = events
+      .filter((event) => event.event === 'response.output_item.added')
+      .map((event) => JSON.parse(event.data) as { item?: { type?: string } });
+    const completed = JSON.parse(events.at(-1)?.data ?? '{}') as {
+      response?: {
+        output?: Array<{
+          type?: string;
+          arguments?: string;
+          content?: Array<{ text?: string }>;
+        }>;
+      };
+    };
+
+    expect(addedItems.map((event) => event.item?.type)).toEqual([
+      'message',
+      'function_call',
+    ]);
+    expect(completed.response?.output?.map((item) => item.type)).toEqual([
+      'message',
+      'function_call',
+    ]);
+    expect(completed.response?.output?.[0]?.content?.[0]?.text).toBe(
+      'Calling tool',
+    );
+    expect(completed.response?.output?.[1]?.arguments).toBe('{"city":"SF"}');
+  });
+
+  test('stream response emits responses error events and stops before response.completed on bridge failure', async () => {
+    upstream?.configure({
+      streamEvents: [
+        {
+          id: 'chatcmpl-responses-error-e2e-mock',
+          object: 'chat.completion.chunk',
+          created: 1,
+          model: UPSTREAM_MODEL,
+          choices: [
+            {
+              index: 1,
+              delta: { role: 'assistant', content: 'invalid choice index' },
+              finish_reason: null,
+            },
+          ],
+        },
+        '[DONE]',
+      ],
+    });
+
+    const resp = await proxyPost(
+      '/v1/responses',
+      {
+        model: mockModelName,
+        input: 'emit invalid choice index stream',
+        stream: true,
+      },
+      AUTHORIZED_KEY,
+      { responseType: 'text' },
+    );
+
+    expect(resp.status).toBe(200);
+
+    const events = parseResponsesSseEvents(String(resp.data));
+    const errorEvent = events.find((event) => event.event === 'error');
+
+    expect(errorEvent).toBeDefined();
+    expect(events.some((event) => event.event === 'response.completed')).toBe(
+      false,
+    );
+
+    const errorPayload = JSON.parse(errorEvent?.data ?? '{}') as {
+      type?: string;
+      message?: string;
+    };
+
+    expect(errorPayload.type).toBe('error');
+    expect(errorPayload.message).toContain('choice index 0');
+  });
+
   test('previous_response_id replays session history through proxy gateway wiring', async () => {
     const firstResp = await proxyPost(
       '/v1/responses',

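Note: the assertions in this diff rely on a parseResponsesSseEvents helper defined elsewhere in the test suite. A minimal sketch of what such an SSE-parsing helper could look like is shown below; this is an illustrative assumption, not the repository's actual implementation, and the real helper may differ.

// Illustrative sketch only — the repository's real parseResponsesSseEvents
// helper may differ. Splits an SSE response body into named events.
interface ResponsesSseEvent {
  event: string;
  data: string;
}

function parseResponsesSseEvents(body: string): ResponsesSseEvent[] {
  const events: ResponsesSseEvent[] = [];
  // SSE frames are separated by blank lines; each frame carries
  // `event:` and `data:` fields.
  for (const frame of body.split(/\r?\n\r?\n/)) {
    let event = '';
    const dataLines: string[] = [];
    for (const line of frame.split(/\r?\n/)) {
      if (line.startsWith('event:')) {
        event = line.slice('event:'.length).trim();
      } else if (line.startsWith('data:')) {
        dataLines.push(line.slice('data:'.length).trim());
      }
    }
    if (event) {
      events.push({ event, data: dataLines.join('\n') });
    }
  }
  return events;
}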