@@ -11,6 +11,7 @@ beforeAll(() => {
1111} ) ;
1212
// Header name → value map captured from the stubbed request (see captureHeaders).
type CapturedHeaders = Record<string, string>;
// Loosely-typed OpenAI chat completion chunk used to drive the stubbed stream;
// kept as an open record so tests can shape provider payloads freely.
type CompletionChunk = Record<string, unknown>;
1415
1516/**
1617 * Build an LLM, stub its OpenAI client's chat.completions.create, start a chat
@@ -61,6 +62,36 @@ async function captureHeaders(opts: {
6162 return capturedHeaders ;
6263}
6364
65+ async function collectChatChunks ( completionChunks : CompletionChunk [ ] ) {
66+ const llm = new LLM ( {
67+ model : 'openai/gpt-4o-mini' ,
68+ apiKey : 'test-key' ,
69+ apiSecret : 'test-secret' ,
70+ baseURL : 'https://example.livekit.cloud' ,
71+ } ) ;
72+
73+ const stub = async ( ) => ( {
74+ async * [ Symbol . asyncIterator ] ( ) {
75+ for ( const chunk of completionChunks ) {
76+ yield chunk ;
77+ }
78+ } ,
79+ } ) ;
80+
81+ const internal = llm as unknown as {
82+ client : { chat : { completions : { create : typeof stub } } } ;
83+ } ;
84+ internal . client . chat . completions . create = stub ;
85+
86+ const stream = llm . chat ( { chatCtx : new ChatContext ( ) } ) ;
87+ const chunks = [ ] ;
88+ for await ( const chunk of stream ) {
89+ chunks . push ( chunk ) ;
90+ }
91+
92+ return chunks ;
93+ }
94+
6495describe ( 'inference.LLM X-LiveKit-Inference-Priority header' , ( ) => {
6596 // --- no value anywhere ---
6697
@@ -105,3 +136,41 @@ describe('inference.LLM X-LiveKit-Inference-Priority header', () => {
105136 expect ( headers [ 'X-LiveKit-Inference-Priority' ] ) . toBe ( 'priority' ) ;
106137 } ) ;
107138} ) ;
139+
140+ describe ( 'inference.LLM streamed tool calls' , ( ) => {
141+ it ( 'does not forward assistant content on tool call chunks' , async ( ) => {
142+ const chunks = await collectChatChunks ( [
143+ {
144+ id : 'chatcmpl_test' ,
145+ choices : [
146+ {
147+ index : 0 ,
148+ finish_reason : 'tool_calls' ,
149+ delta : {
150+ role : 'assistant' ,
151+ content : 'saveAnswer({"answer":"yes"})' ,
152+ tool_calls : [
153+ {
154+ index : 0 ,
155+ id : 'call_123' ,
156+ type : 'function' ,
157+ function : {
158+ name : 'saveAnswer' ,
159+ arguments : '{"answer":"yes"}' ,
160+ } ,
161+ } ,
162+ ] ,
163+ } ,
164+ } ,
165+ ] ,
166+ } ,
167+ ] ) ;
168+
169+ expect ( chunks ) . toHaveLength ( 1 ) ;
170+ expect ( chunks [ 0 ] ?. delta ?. content ) . toBeUndefined ( ) ;
171+ expect ( chunks [ 0 ] ?. delta ?. toolCalls ) . toHaveLength ( 1 ) ;
172+ expect ( chunks [ 0 ] ?. delta ?. toolCalls ?. [ 0 ] ?. callId ) . toBe ( 'call_123' ) ;
173+ expect ( chunks [ 0 ] ?. delta ?. toolCalls ?. [ 0 ] ?. name ) . toBe ( 'saveAnswer' ) ;
174+ expect ( chunks [ 0 ] ?. delta ?. toolCalls ?. [ 0 ] ?. args ) . toBe ( '{"answer":"yes"}' ) ;
175+ } ) ;
176+ } ) ;
0 commit comments