Skip to content

Commit 6f5d5ef

Browse files
committed
fix: 修复 OpenAI provider 下 MCP 工具不可见 (fix: MCP tools not visible under the OpenAI provider)
1 parent ad09f38 commit 6f5d5ef

3 files changed

Lines changed: 160 additions & 5 deletions

File tree

src/services/api/claude.ts

Lines changed: 4 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1340,7 +1340,10 @@ async function* queryModel(
13401340
// media stripping) but before Anthropic-specific logic (betas, thinking, caching).
13411341
if (getAPIProvider() === 'openai') {
13421342
const { queryModelOpenAI } = await import('./openai/index.js')
1343-
yield* queryModelOpenAI(messagesForAPI, systemPrompt, filteredTools, signal, options)
1343+
// OpenAI emulates Anthropic's dynamic tool loading client-side. It needs
1344+
// the full tool pool so ToolSearchTool can search deferred MCP tools that
1345+
// were intentionally filtered out of the initial API tool list above.
1346+
yield* queryModelOpenAI(messagesForAPI, systemPrompt, tools, signal, options)
13441347
return
13451348
}
13461349

src/services/api/openai/__tests__/queryModelOpenAI.isolated.ts

Lines changed: 117 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -196,10 +196,52 @@ async function runQueryModel(
196196
// We mock at module level. Bun's mock.module replaces the module for the
197197
// entire file, so we configure the stream per-test via a shared variable.
198198
let _nextEvents: BetaRawMessageStreamEvent[] = []
199+
let _toolSearchEnabled = false
199200

200201
/** Captured arguments from the last chat.completions.create() call */
201202
let _lastCreateArgs: Record<string, any> | null = null
202203

204+
mock.module('@ant/model-provider', () => ({
205+
resolveOpenAIModel: (m: string) => m,
206+
adaptOpenAIStreamToAnthropic: (_stream: any, _model: string) =>
207+
eventStream(_nextEvents),
208+
anthropicMessagesToOpenAI: (messages: any[]) =>
209+
messages.map(msg => ({
210+
role: msg.message?.role ?? 'user',
211+
content: msg.message?.content ?? '',
212+
})),
213+
anthropicToolsToOpenAI: (tools: any[]) =>
214+
tools.map(tool => ({
215+
type: 'function',
216+
function: {
217+
name: tool.name,
218+
description: tool.description ?? '',
219+
parameters: tool.input_schema ?? { type: 'object', properties: {} },
220+
},
221+
})),
222+
anthropicToolChoiceToOpenAI: () => undefined,
223+
}))
224+
225+
mock.module('../../../../utils/envUtils.js', () => ({
226+
isEnvTruthy: (value: string | undefined) =>
227+
value === '1' || value === 'true' || value === 'yes' || value === 'on',
228+
isEnvDefinedFalsy: (value: string | undefined) =>
229+
value === '0' || value === 'false' || value === 'no' || value === 'off',
230+
}))
231+
232+
mock.module('../../../../services/analytics/growthbook.js', () => ({
233+
getFeatureValue_CACHED_MAY_BE_STALE: (_key: string, fallback: unknown) =>
234+
fallback,
235+
}))
236+
237+
mock.module('src/bootstrap/state.js', () => ({
238+
isReplBridgeActive: () => false,
239+
}))
240+
241+
mock.module('bun:bundle', () => ({
242+
feature: () => false,
243+
}))
244+
203245
mock.module('../client.js', () => ({
204246
getOpenAIClient: () => ({
205247
chat: {
@@ -252,6 +294,13 @@ mock.module('../../../../utils/context.js', () => ({
252294
mock.module('../../../../utils/messages.js', () => ({
253295
normalizeMessagesForAPI: (msgs: any) => msgs,
254296
normalizeContentFromAPI: (blocks: any[]) => blocks,
297+
createUserMessage: (opts: any) => ({
298+
type: 'user',
299+
message: { role: 'user', content: opts.content },
300+
uuid: 'user-uuid',
301+
timestamp: new Date().toISOString(),
302+
isMeta: opts.isMeta,
303+
}),
255304
createAssistantAPIErrorMessage: (opts: any) => ({
256305
type: 'assistant',
257306
message: {
@@ -268,8 +317,9 @@ mock.module('../../../../utils/api.js', () => ({
268317
}))
269318

270319
mock.module('../../../../utils/toolSearch.js', () => ({
271-
isToolSearchEnabled: async () => false,
320+
isToolSearchEnabled: async () => _toolSearchEnabled,
272321
extractDiscoveredToolNames: () => new Set(),
322+
isDeferredToolsDeltaEnabled: () => false,
273323
}))
274324

275325
mock.module('../../../../tools/ToolSearchTool/prompt.js', () => ({
@@ -297,6 +347,16 @@ mock.module('../../../../utils/modelCost.js', () => ({
297347
getModelPricingString: () => undefined,
298348
}))
299349

350+
mock.module('../../../../services/langfuse/tracing.js', () => ({
351+
recordLLMObservation: () => {},
352+
}))
353+
354+
mock.module('../../../../services/langfuse/convert.js', () => ({
355+
convertMessagesToLangfuse: () => [],
356+
convertOutputToLangfuse: () => ({}),
357+
convertToolsToLangfuse: () => [],
358+
}))
359+
300360
mock.module('../../../../utils/debug.js', () => ({
301361
logForDebugging: () => {},
302362
logAntError: () => {},
@@ -543,3 +603,59 @@ describe('queryModelOpenAI — max_tokens forwarded to request', () => {
543603
expect(_lastCreateArgs!.max_tokens).toBe(8192)
544604
})
545605
})
606+
607+
describe('queryModelOpenAI — deferred MCP tool visibility', () => {
608+
test('prepends available deferred MCP tools to OpenAI messages', async () => {
609+
_toolSearchEnabled = true
610+
_nextEvents = [makeMessageStart(), makeMessageStop()]
611+
612+
try {
613+
const { queryModelOpenAI } = await import('../index.js')
614+
const tools: any[] = [
615+
{
616+
name: 'ToolSearch',
617+
isMcp: false,
618+
input_schema: { type: 'object', properties: {} },
619+
prompt: async () => 'Search deferred tools',
620+
},
621+
{
622+
name: 'mcp__wechat__send_message',
623+
isMcp: true,
624+
input_schema: { type: 'object', properties: {} },
625+
prompt: async () => 'Send a WeChat message',
626+
},
627+
]
628+
629+
const options: any = {
630+
model: 'test-model',
631+
tools: [],
632+
agents: [],
633+
querySource: 'main_loop',
634+
getToolPermissionContext: async () => ({
635+
alwaysAllow: [],
636+
alwaysDeny: [],
637+
needsPermission: [],
638+
mode: 'default',
639+
isBypassingPermissions: false,
640+
}),
641+
}
642+
643+
for await (const _item of queryModelOpenAI(
644+
[],
645+
{ type: 'text', text: '' } as any,
646+
tools as any,
647+
new AbortController().signal,
648+
options,
649+
)) {
650+
// Exhaust generator so request body is built.
651+
}
652+
653+
expect(_lastCreateArgs).not.toBeNull()
654+
expect(JSON.stringify(_lastCreateArgs!.messages)).toContain(
655+
'<available-deferred-tools>\\nmcp__wechat__send_message\\n</available-deferred-tools>',
656+
)
657+
} finally {
658+
_toolSearchEnabled = false
659+
}
660+
})
661+
})

src/services/api/openai/index.ts

Lines changed: 39 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -32,18 +32,46 @@ import type { Options } from '../claude.js'
3232
import { randomUUID } from 'crypto'
3333
import {
3434
createAssistantAPIErrorMessage,
35+
createUserMessage,
3536
normalizeContentFromAPI,
3637
} from '../../../utils/messages.js'
3738
import type { SDKAssistantMessageError } from '../../../entrypoints/agentSdkTypes.js'
3839
import {
3940
isToolSearchEnabled,
4041
extractDiscoveredToolNames,
42+
isDeferredToolsDeltaEnabled,
4143
} from '../../../utils/toolSearch.js'
4244
import {
45+
formatDeferredToolLine,
4346
isDeferredTool,
4447
TOOL_SEARCH_TOOL_NAME,
4548
} from '@claude-code-best/builtin-tools/tools/ToolSearchTool/prompt.js'
4649

50+
function prependDeferredToolListIfNeeded(
51+
messages: Message[],
52+
tools: Tools,
53+
deferredToolNames: Set<string>,
54+
useToolSearch: boolean,
55+
): Message[] {
56+
if (!useToolSearch || isDeferredToolsDeltaEnabled()) return messages
57+
58+
const deferredToolList = tools
59+
.filter(tool => deferredToolNames.has(tool.name))
60+
.map(formatDeferredToolLine)
61+
.sort()
62+
.join('\n')
63+
64+
if (!deferredToolList) return messages
65+
66+
return [
67+
createUserMessage({
68+
content: `<available-deferred-tools>\n${deferredToolList}\n</available-deferred-tools>`,
69+
isMeta: true,
70+
}),
71+
...messages,
72+
]
73+
}
74+
4775
/**
4876
* Assemble the final AssistantMessage (and optional max_tokens error) from
4977
* accumulated stream state. Extracted to avoid duplication between the
@@ -176,9 +204,17 @@ export async function* queryModelOpenAI(
176204

177205
// 8. Convert messages and tools to OpenAI format
178206
const enableThinking = isOpenAIThinkingEnabled(openaiModel)
179-
const openaiMessages = anthropicMessagesToOpenAI(messagesForAPI, systemPrompt, {
180-
enableThinking,
181-
})
207+
const messagesWithDeferredToolList = prependDeferredToolListIfNeeded(
208+
messagesForAPI,
209+
tools,
210+
deferredToolNames,
211+
useToolSearch,
212+
)
213+
const openaiMessages = anthropicMessagesToOpenAI(
214+
messagesWithDeferredToolList,
215+
systemPrompt,
216+
{ enableThinking },
217+
)
182218
const openaiTools = anthropicToolsToOpenAI(standardTools)
183219
const openaiToolChoice = anthropicToolChoiceToOpenAI(options.toolChoice)
184220

0 commit comments

Comments
 (0)