Skip to content

Commit feff4df

Browse files
committed
fix: keep responses reasoning adjacent to messages
1 parent a1ead04 commit feff4df

File tree

2 files changed: +51 −2 lines changed

core/llm/openaiTypeConverters.test.ts

Lines changed: 49 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -489,6 +489,55 @@ describe("openaiTypeConverters", () => {
489489
expect(functionCalls[0].id).toBe("fc_001");
490490
});
491491

492+
it("should emit assistant message before function_call when reasoning and tool calls share a turn", () => {
493+
const messages: ChatMessage[] = [
494+
{
495+
role: "thinking",
496+
content: "",
497+
reasoning_details: [
498+
{ type: "reasoning_id", id: "rs_001" },
499+
{
500+
type: "encrypted_content",
501+
encrypted_content: "encrypted_data_here",
502+
},
503+
],
504+
metadata: { reasoningId: "rs_001" },
505+
} as ChatMessage,
506+
{
507+
role: "assistant",
508+
content: "I'll inspect the file first.",
509+
toolCalls: [
510+
{
511+
id: "call_001",
512+
type: "function",
513+
function: { name: "read_file", arguments: '{"path":"a.txt"}' },
514+
},
515+
],
516+
metadata: {
517+
responsesOutputItemIds: ["msg_001", "fc_001"],
518+
responsesOutputItemId: "fc_001",
519+
},
520+
} as ChatMessage,
521+
];
522+
523+
const result = toResponsesInput(messages);
524+
525+
expect(result[0]).toMatchObject({
526+
type: "reasoning",
527+
id: "rs_001",
528+
});
529+
expect(result[1]).toMatchObject({
530+
type: "message",
531+
role: "assistant",
532+
id: "msg_001",
533+
});
534+
expect(result[2]).toMatchObject({
535+
type: "function_call",
536+
id: "fc_001",
537+
call_id: "call_001",
538+
});
539+
});
540+
492541
it("should strip fc_ id from function_calls after removed reasoning", () => {
493542
const messages: ChatMessage[] = [
494543
{

core/llm/openaiTypeConverters.ts

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -1038,8 +1038,6 @@ export function toResponsesInput(messages: ChatMessage[]): ResponseInput {
10381038
(respId?.startsWith("msg_") ? respId : undefined);
10391039

10401040
if (Array.isArray(toolCalls) && toolCalls.length > 0) {
1041-
emitFunctionCallsFromToolCalls(toolCalls, fcIds, input);
1042-
10431041
if (text && text.trim()) {
10441042
if (msgId) {
10451043
const outputMessageItem: ResponseOutputMessage = {
@@ -1060,6 +1058,8 @@ export function toResponsesInput(messages: ChatMessage[]): ResponseInput {
10601058
pushMessage("assistant", text);
10611059
}
10621060
}
1061+
1062+
emitFunctionCallsFromToolCalls(toolCalls, fcIds, input);
10631063
} else if (msgId) {
10641064
const outputMessageItem: ResponseOutputMessage = {
10651065
id: msgId,

0 commit comments

Comments (0)