@@ -196,10 +196,52 @@ async function runQueryModel(
// We mock at module level. Bun's mock.module replaces the module for the
// entire file, so we configure the stream per-test via a shared variable.
let _nextEvents: BetaRawMessageStreamEvent[] = []
// Per-test toggle read by the mocked isToolSearchEnabled(); tests that set
// this to true must reset it to false when done (see the finally blocks).
let _toolSearchEnabled = false

/** Captured arguments from the last chat.completions.create() call */
let _lastCreateArgs: Record<string, any> | null = null
202203
204+ mock . module ( '@ant/model-provider' , ( ) => ( {
205+ resolveOpenAIModel : ( m : string ) => m ,
206+ adaptOpenAIStreamToAnthropic : ( _stream : any , _model : string ) =>
207+ eventStream ( _nextEvents ) ,
208+ anthropicMessagesToOpenAI : ( messages : any [ ] ) =>
209+ messages . map ( msg => ( {
210+ role : msg . message ?. role ?? 'user' ,
211+ content : msg . message ?. content ?? '' ,
212+ } ) ) ,
213+ anthropicToolsToOpenAI : ( tools : any [ ] ) =>
214+ tools . map ( tool => ( {
215+ type : 'function' ,
216+ function : {
217+ name : tool . name ,
218+ description : tool . description ?? '' ,
219+ parameters : tool . input_schema ?? { type : 'object' , properties : { } } ,
220+ } ,
221+ } ) ) ,
222+ anthropicToolChoiceToOpenAI : ( ) => undefined ,
223+ } ) )
224+
225+ mock . module ( '../../../../utils/envUtils.js' , ( ) => ( {
226+ isEnvTruthy : ( value : string | undefined ) =>
227+ value === '1' || value === 'true' || value === 'yes' || value === 'on' ,
228+ isEnvDefinedFalsy : ( value : string | undefined ) =>
229+ value === '0' || value === 'false' || value === 'no' || value === 'off' ,
230+ } ) )
231+
232+ mock . module ( '../../../../services/analytics/growthbook.js' , ( ) => ( {
233+ getFeatureValue_CACHED_MAY_BE_STALE : ( _key : string , fallback : unknown ) =>
234+ fallback ,
235+ } ) )
236+
237+ mock . module ( 'src/bootstrap/state.js' , ( ) => ( {
238+ isReplBridgeActive : ( ) => false ,
239+ } ) )
240+
241+ mock . module ( 'bun:bundle' , ( ) => ( {
242+ feature : ( ) => false ,
243+ } ) )
244+
203245mock . module ( '../client.js' , ( ) => ( {
204246 getOpenAIClient : ( ) => ( {
205247 chat : {
@@ -252,6 +294,13 @@ mock.module('../../../../utils/context.js', () => ({
252294mock . module ( '../../../../utils/messages.js' , ( ) => ( {
253295 normalizeMessagesForAPI : ( msgs : any ) => msgs ,
254296 normalizeContentFromAPI : ( blocks : any [ ] ) => blocks ,
297+ createUserMessage : ( opts : any ) => ( {
298+ type : 'user' ,
299+ message : { role : 'user' , content : opts . content } ,
300+ uuid : 'user-uuid' ,
301+ timestamp : new Date ( ) . toISOString ( ) ,
302+ isMeta : opts . isMeta ,
303+ } ) ,
255304 createAssistantAPIErrorMessage : ( opts : any ) => ( {
256305 type : 'assistant' ,
257306 message : {
@@ -268,8 +317,9 @@ mock.module('../../../../utils/api.js', () => ({
268317} ) )
269318
270319mock . module ( '../../../../utils/toolSearch.js' , ( ) => ( {
271- isToolSearchEnabled : async ( ) => false ,
320+ isToolSearchEnabled : async ( ) => _toolSearchEnabled ,
272321 extractDiscoveredToolNames : ( ) => new Set ( ) ,
322+ isDeferredToolsDeltaEnabled : ( ) => false ,
273323} ) )
274324
275325mock . module ( '../../../../tools/ToolSearchTool/prompt.js' , ( ) => ( {
@@ -297,6 +347,16 @@ mock.module('../../../../utils/modelCost.js', () => ({
297347 getModelPricingString : ( ) => undefined ,
298348} ) )
299349
350+ mock . module ( '../../../../services/langfuse/tracing.js' , ( ) => ( {
351+ recordLLMObservation : ( ) => { } ,
352+ } ) )
353+
354+ mock . module ( '../../../../services/langfuse/convert.js' , ( ) => ( {
355+ convertMessagesToLangfuse : ( ) => [ ] ,
356+ convertOutputToLangfuse : ( ) => ( { } ) ,
357+ convertToolsToLangfuse : ( ) => [ ] ,
358+ } ) )
359+
300360mock . module ( '../../../../utils/debug.js' , ( ) => ( {
301361 logForDebugging : ( ) => { } ,
302362 logAntError : ( ) => { } ,
@@ -543,3 +603,59 @@ describe('queryModelOpenAI — max_tokens forwarded to request', () => {
543603 expect ( _lastCreateArgs ! . max_tokens ) . toBe ( 8192 )
544604 } )
545605} )
606+
describe('queryModelOpenAI — deferred MCP tool visibility', () => {
  test('prepends available deferred MCP tools to OpenAI messages', async () => {
    // Opt in to tool search for this test only; reset in finally below.
    _toolSearchEnabled = true
    // Minimal stream: just start/stop so the generator terminates quickly.
    _nextEvents = [makeMessageStart(), makeMessageStop()]

    try {
      // Import after mocks are configured so the module under test picks up
      // the mocked dependencies.
      const { queryModelOpenAI } = await import('../index.js')
      const tools: any[] = [
        {
          name: 'ToolSearch',
          isMcp: false,
          input_schema: { type: 'object', properties: {} },
          prompt: async () => 'Search deferred tools',
        },
        {
          // The MCP tool expected to be listed as an available deferred tool.
          name: 'mcp__wechat__send_message',
          isMcp: true,
          input_schema: { type: 'object', properties: {} },
          prompt: async () => 'Send a WeChat message',
        },
      ]

      const options: any = {
        model: 'test-model',
        tools: [],
        agents: [],
        querySource: 'main_loop',
        // Permissive default permission context — no allow/deny entries.
        getToolPermissionContext: async () => ({
          alwaysAllow: [],
          alwaysDeny: [],
          needsPermission: [],
          mode: 'default',
          isBypassingPermissions: false,
        }),
      }

      for await (const _item of queryModelOpenAI(
        [],
        { type: 'text', text: '' } as any,
        tools as any,
        new AbortController().signal,
        options,
      )) {
        // Exhaust generator so request body is built.
      }

      expect(_lastCreateArgs).not.toBeNull()
      // NOTE: '\\n' (backslash-n) is intentional — JSON.stringify escapes
      // real newlines in the message content to the two characters \ and n.
      expect(JSON.stringify(_lastCreateArgs!.messages)).toContain(
        '<available-deferred-tools>\\nmcp__wechat__send_message\\n</available-deferred-tools>',
      )
    } finally {
      // Restore the shared flag so later tests see tool search disabled.
      _toolSearchEnabled = false
    }
  })
})
0 commit comments