Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
40 changes: 37 additions & 3 deletions src/lib/ResponsesParser.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ import {
type ResponseCreateParamsBase,
type ResponseCreateParamsNonStreaming,
type ResponseFunctionToolCall,
type ResponseInputItem,
type ResponseOutputItem,
type Tool,
} from '../resources/responses/responses';
import { type AutoParseableTextFormat, isAutoParsableResponseFormat } from '../lib/parser';
Expand All @@ -30,6 +32,13 @@ export function maybeParseResponse<
Params extends ResponseCreateParamsBase | null,
ParsedT = Params extends null ? null : ExtractParsedContentFromParams<NonNullable<Params>>,
>(response: Response, params: Params): ParsedResponse<ParsedT> {
if (!Object.getOwnPropertyDescriptor(response, 'output_text')) {
addOutputText(response);
}
if (!Object.getOwnPropertyDescriptor(response, 'output_as_input')) {
addOutputAsInput(response);
}

if (!params || !hasAutoParseableInput(params)) {
return {
...response,
Expand Down Expand Up @@ -95,9 +104,6 @@ export function parseResponse<
);

const parsed: Omit<ParsedResponse<ParsedT>, 'output_parsed'> = Object.assign({}, response, { output });
if (!Object.getOwnPropertyDescriptor(response, 'output_text')) {
addOutputText(parsed);
}

Object.defineProperty(parsed, 'output_parsed', {
enumerable: true,
Expand Down Expand Up @@ -263,3 +269,31 @@ export function addOutputText(rsp: Response): void {

rsp.output_text = texts.join('');
}

export function addOutputAsInput(rsp: Response): void {
rsp.output_as_input = rsp.output.map(outputItemToInputItem);
}

/**
 * Converts a single output item into an input-compatible item.
 *
 * Every output shape except `computer_call_output` is already valid as input
 * and is returned by reference. `computer_call_output` is rebuilt so that
 * output-only fields (e.g. `created_by`) and a non-replayable `'failed'`
 * status are dropped.
 */
function outputItemToInputItem(item: ResponseOutputItem): ResponseInputItem {
  if (item.type === 'computer_call_output') {
    const inputItem: ResponseInputItem.ComputerCallOutput = {
      call_id: item.call_id,
      output: item.output,
      type: 'computer_call_output',
    };

    // Copy optional fields only when present so we never create own
    // properties holding `undefined` (those would leak into Object.keys,
    // spreads, and deep-equality checks, and violate
    // `exactOptionalPropertyTypes`).
    if (item.id) {
      inputItem.id = item.id;
    }
    if (item.acknowledged_safety_checks) {
      inputItem.acknowledged_safety_checks = item.acknowledged_safety_checks;
    }
    // Fix: the original `item.status !== 'failed'` check also passed for
    // `undefined`, assigning an explicit `status: undefined` key.
    if (item.status !== undefined && item.status !== 'failed') {
      inputItem.status = item.status;
    }

    return inputItem;
  }

  return item;
}
28 changes: 25 additions & 3 deletions src/resources/responses/responses.ts
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@

import {
type ExtractParsedContentFromParams,
addOutputAsInput,
parseResponse,
type ResponseCreateParamsWithTools,
addOutputText,
Expand Down Expand Up @@ -74,6 +75,12 @@ export class Responses extends APIResource {
inputItems: InputItemsAPI.InputItems = new InputItemsAPI.InputItems(this._client);
inputTokens: InputTokensAPI.InputTokens = new InputTokensAPI.InputTokens(this._client);

/**
 * Attaches the derived convenience fields (`output_text`,
 * `output_as_input`) to a raw API response, mutating it in place and
 * returning it for chaining.
 */
private hydrateResponseHelpers(rsp: Response): Response {
  addOutputText(rsp);
  addOutputAsInput(rsp);
  return rsp;
}

/**
* Creates a model response. Provide
* [text](https://platform.openai.com/docs/guides/text) or
Expand Down Expand Up @@ -111,7 +118,7 @@ export class Responses extends APIResource {
| APIPromise<Stream<ResponseStreamEvent>>
)._thenUnwrap((rsp) => {
if ('object' in rsp && rsp.object === 'response') {
addOutputText(rsp as Response);
return this.hydrateResponseHelpers(rsp as Response);
}

return rsp;
Expand Down Expand Up @@ -156,7 +163,7 @@ export class Responses extends APIResource {
}) as APIPromise<Response> | APIPromise<Stream<ResponseStreamEvent>>
)._thenUnwrap((rsp) => {
if ('object' in rsp && rsp.object === 'response') {
addOutputText(rsp as Response);
return this.hydrateResponseHelpers(rsp as Response);
}

return rsp;
Expand Down Expand Up @@ -212,7 +219,13 @@ export class Responses extends APIResource {
* ```
*/
cancel(responseID: string, options?: RequestOptions): APIPromise<Response> {
  const pending = this._client.post(path`/responses/${responseID}/cancel`, options);
  return pending._thenUnwrap((rsp) => {
    // Only hydrate bodies that are actually Response envelopes.
    if (!('object' in rsp) || rsp.object !== 'response') {
      return rsp;
    }
    return this.hydrateResponseHelpers(rsp as Response);
  }) as APIPromise<Response>;
}

/**
Expand Down Expand Up @@ -954,6 +967,15 @@ export interface Response {

output_text: string;

/**
* A replay-safe version of `output` for manual multi-turn conversations.
*
* This preserves the original item ordering so reasoning/message pairs stay
* adjacent, and normalizes output-only item shapes such as
* `computer_call_output` before they are passed back as `input`.
*/
output_as_input: Array<ResponseInputItem>;

/**
* An error object returned when the model fails to generate a Response.
*/
Expand Down
4 changes: 4 additions & 0 deletions tests/api-resources/responses/responses.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,8 @@ describe('resource responses', () => {

expect(response).toHaveProperty('output_text');
expect(typeof response.output_text).toBe('string');
expect(response).toHaveProperty('output_as_input');
expect(Array.isArray(response.output_as_input)).toBe(true);
});

test('retrieve', async () => {
Expand All @@ -34,6 +36,8 @@ describe('resource responses', () => {

expect(response).toHaveProperty('output_text');
expect(typeof response.output_text).toBe('string');
expect(response).toHaveProperty('output_as_input');
expect(Array.isArray(response.output_as_input)).toBe(true);
});

test('retrieve: request options and params are passed correctly', async () => {
Expand Down
28 changes: 28 additions & 0 deletions tests/responses.test.ts
Original file line number Diff line number Diff line change
Expand Up @@ -54,6 +54,34 @@ describe('request id', () => {
expect(JSON.stringify(rsp)).toBe('{"id":"bar"}');
});

test('responses.cancel hydrates output helpers', async () => {
  // Minimal but valid response envelope, served by a stubbed fetch so that
  // cancel() never touches the network.
  const payload = {
    id: 'resp_123',
    object: 'response',
    output: [
      {
        id: 'msg_123',
        type: 'message',
        role: 'assistant',
        status: 'completed',
        content: [{ type: 'output_text', text: 'hello', annotations: [] }],
      },
    ],
  };
  const client = new OpenAI({
    apiKey: 'dummy',
    fetch: async () =>
      new Response(JSON.stringify(payload), { headers: { 'content-type': 'application/json' } }),
  });

  const response = await client.responses.cancel('resp_123');

  // cancel() should attach both derived helper fields.
  expect(response.output_text).toBe('hello');
  expect(response.output_as_input).toEqual(response.output);
});

test('envelope response', async () => {
const promise = new APIPromise<{ data: { foo: string } }>(
client,
Expand Down
54 changes: 46 additions & 8 deletions tests/responsesItems.test.ts
Original file line number Diff line number Diff line change
@@ -1,16 +1,54 @@
import OpenAI from 'openai/index';
const openai = new OpenAI({ apiKey: 'example-api-key' });
import { addOutputAsInput } from '../src/lib/ResponsesParser';

// Type guard: `computer_call_output` is the one output shape that cannot be
// replayed verbatim as an input item; everything else is input-compatible.
function isInputCompatibleOutputItem(
  item: OpenAI.Responses.ResponseOutputItem,
): item is Exclude<OpenAI.Responses.ResponseOutputItem, OpenAI.Responses.ResponseComputerToolCallOutputItem> {
  switch (item.type) {
    case 'computer_call_output':
      return false;
    default:
      return true;
  }
}
const openai = new OpenAI({ apiKey: 'example-api-key' });

describe('responses item types', () => {
  test('response output items are compatible with input items', async () => {
    expect(true).toBe(true);
  });

  test('output_as_input preserves reasoning/message order and normalizes computer outputs', () => {
    // A response whose output mixes pass-through items with a
    // computer_call_output that needs normalization before replay.
    const rsp = {
      output: [
        { id: 'rs_1', type: 'reasoning', summary: [], status: 'completed' },
        {
          id: 'msg_1',
          type: 'message',
          role: 'assistant',
          status: 'completed',
          content: [{ type: 'output_text', text: 'hello', annotations: [] }],
        },
        {
          id: 'cco_1',
          type: 'computer_call_output',
          call_id: 'call_1',
          output: { type: 'computer_screenshot', image_url: 'https://example.com/shot.png' },
          status: 'failed',
          created_by: 'assistant',
        },
      ],
    } as OpenAI.Responses.Response;

    addOutputAsInput(rsp);

    const replayed = rsp.output_as_input;
    expect(replayed).toHaveLength(3);
    // Pass-through items keep their position and shape.
    expect(replayed[0]).toEqual({ id: 'rs_1', type: 'reasoning', summary: [], status: 'completed' });
    expect(replayed[1]).toEqual({
      id: 'msg_1',
      type: 'message',
      role: 'assistant',
      status: 'completed',
      content: [{ type: 'output_text', text: 'hello', annotations: [] }],
    });
    // The computer_call_output is rebuilt: the output-only `created_by`
    // field and the non-replayable 'failed' status are dropped.
    expect(replayed[2]).toEqual({
      id: 'cco_1',
      type: 'computer_call_output',
      call_id: 'call_1',
      output: { type: 'computer_screenshot', image_url: 'https://example.com/shot.png' },
    });
  });
});

const unused = async () => {
Expand All @@ -20,8 +58,8 @@ const unused = async () => {
});
await openai.responses.create({
model: 'gpt-5.1',
// check type compatibility
input: response.output.filter(isInputCompatibleOutputItem),
// check type compatibility for a replay-safe helper
input: response.output_as_input,
});
expect(true).toBe(true);
};