@@ -36,6 +36,11 @@ public class LLMService : ILLMService
3636 private readonly IMemoryFactory memoryFactory ;
3737 private readonly string modelsPath ;
3838
39+ private readonly JsonSerializerOptions _jsonToolOptions = new ( )
40+ {
41+ PropertyNameCaseInsensitive = true ,
42+ } ;
43+
3944 public LLMService (
4045 MaINSettings options ,
4146 INotificationService notificationService ,
@@ -341,9 +346,10 @@ private static void ProcessTextMessage(Conversation conversation,
341346 }
342347 }
343348
344- if ( hasTools )
349+ if ( hasTools && isNewConversation )
345350 {
346351 var toolsPrompt = FormatToolsForPrompt ( chat . ToolsConfiguration ! ) ;
352+ // Add this as a system message or at the start of the first user prompt
347353 finalPrompt = $ "{ toolsPrompt } \n \n { finalPrompt } ";
348354 }
349355
@@ -371,11 +377,14 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig)
371377 sb . AppendLine ( $ " Parameters: { JsonSerializer . Serialize ( tool . Function . Parameters ) } ") ;
372378 }
373379
374- sb . AppendLine ( "\n ## RESPONSE FORMAT" ) ;
380+ sb . AppendLine ( "\n ## RESPONSE FORMAT (YOU HAVE TO CHOOSE ONE FORMAT AND CANNOT MIX THEM)## " ) ;
375381 sb . AppendLine ( "1. For normal conversation, just respond with plain text." ) ;
376- sb . AppendLine ( "2. For tool calls, use this format:" ) ;
382+ sb . AppendLine ( "2. For tool calls, use this format. " +
383+ "You cannot respond with plain text before or after format. " +
384+ "If you want to call multiple functions, you have to combine them into one array." +
385+ "Your response MUST contain only one tool call block:" ) ;
377386 sb . AppendLine ( "<tool_call>" ) ;
378- sb . AppendLine ( "{\" tool_calls\" : [{\" id\" : \" abc \" , \" type\" : \" function\" , \" function\" : {\" name\" : \" fn \" , \" arguments\" : \" {\\ \" p \\ \" :\\ \" v \\ \" }\" }}]}" ) ;
387+ sb . AppendLine ( "{\" tool_calls\" : [{\" id\" : \" call_1 \" , \" type\" : \" function\" , \" function\" : {\" name\" : \" tool_name \" , \" arguments\" : \" {\\ \" param \\ \" :\\ \" value \\ \" } \" }},{ \" id \" : \" call_2 \" , \" type \" : \" function \" , \" function \" : { \" name \" : \" tool2_name \" , \" arguments \" : \" { \\ \" param1 \\ \" : \\ \" value1 \\ \" , \\ \" param2 \\ \" : \\ \" value2 \\ \" }\" }}]}" ) ;
379388 sb . AppendLine ( "</tool_call>" ) ;
380389
381390 return sb . ToString ( ) ;
@@ -385,9 +394,9 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig)
385394 {
386395 if ( string . IsNullOrWhiteSpace ( response ) ) return null ;
387396
397+ string jsonContent = ExtractJsonContent ( response ) ;
388398 try
389399 {
390- string jsonContent = ExtractJsonContent ( response ) ;
391400 if ( string . IsNullOrEmpty ( jsonContent ) ) return null ;
392401
393402 using var doc = JsonDocument . Parse ( jsonContent ) ;
@@ -396,7 +405,7 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig)
396405 // OpenAI standard { "tool_calls": [...] }
397406 if ( root . ValueKind == JsonValueKind . Object && root . TryGetProperty ( "tool_calls" , out var toolCallsProp ) )
398407 {
399- var calls = toolCallsProp . Deserialize < List < ToolCall > > ( new JsonSerializerOptions { PropertyNameCaseInsensitive = true } ) ;
408+ var calls = toolCallsProp . Deserialize < List < ToolCall > > ( _jsonToolOptions ) ;
400409 return NormalizeToolCalls ( calls ) ;
401410 }
402411
@@ -417,7 +426,7 @@ private static string FormatToolsForPrompt(ToolsConfiguration toolsConfig)
417426 }
418427 catch ( Exception )
419428 {
420- // No tool calls found
429+ // No tool calls found — no need to throw or log
421430 }
422431
423432 return null ;
@@ -429,14 +438,14 @@ private string ExtractJsonContent(string text)
429438
430439 int firstBrace = text . IndexOf ( '{' ) ;
431440 int firstBracket = text . IndexOf ( '[' ) ;
432- int startIndex = ( firstBrace >= 0 && firstBracket >= 0 ) ? Math . Min ( firstBrace , firstBracket ) : Math . Max ( firstBrace , firstBracket ) ;
441+ int startIndex = ( firstBrace >= 0 && firstBracket >= 0 ) ? Math . Min ( firstBrace , firstBracket ) : Math . Max ( firstBrace , firstBracket ) ;
433442
434443 int lastBrace = text . LastIndexOf ( '}' ) ;
435444 int lastBracket = text . LastIndexOf ( ']' ) ;
436- int endIndex = Math . Max ( lastBrace , lastBracket ) ;
445+ int endIndex = Math . Max ( lastBrace , lastBracket ) ;
437446
438- if ( startIndex >= 0 && endIndex > startIndex )
439- {
447+ if ( startIndex >= 0 && endIndex > startIndex )
448+ {
440449 return text . Substring ( startIndex , endIndex - startIndex + 1 ) ;
441450 }
442451
@@ -645,34 +654,35 @@ private async Task<ChatResult> ProcessWithToolsAsync(
645654 Chat chat ,
646655 ChatRequestOptions requestOptions ,
647656 CancellationToken cancellationToken )
648- {
657+ {
658+ NativeLogConfig . llama_log_set ( ( level , message ) => {
659+ if ( level == LLamaLogLevel . Error )
660+ {
661+ Console . Error . Write ( message ) ;
662+ }
663+ } ) ; // Suppress llama native logging; only error-level messages go to stderr
664+
649665 var model = KnownModels . GetModel ( chat . Model ) ;
650666 var tokens = new List < LLMTokenValue > ( ) ;
651667 var fullResponseBuilder = new StringBuilder ( ) ;
652668 var iterations = 0 ;
653669
654670 while ( iterations < MaxToolIterations )
655- {
656- if ( iterations > 0 && requestOptions . InteractiveUpdates && fullResponseBuilder . Length > 0 )
657- {
658- var spaceToken = new LLMTokenValue { Text = " " , Type = TokenType . Message } ;
659- tokens . Add ( spaceToken ) ;
660-
661- requestOptions . TokenCallback ? . Invoke ( spaceToken ) ;
662-
663- await notificationService . DispatchNotification (
664- NotificationMessageBuilder . CreateChatCompletion ( chat . Id , spaceToken , false ) ,
665- ServiceConstants . Notifications . ReceiveMessageUpdate ) ;
666- }
667-
671+ {
668672 var lastMsg = chat . Messages . Last ( ) ;
673+ await SendNotification ( chat . Id , new LLMTokenValue
674+ {
675+ Type = TokenType . FullAnswer ,
676+ Text = $ "Processing with tools... iteration { iterations + 1 } \n \n "
677+ } , false ) ;
678+ requestOptions . InteractiveUpdates = false ;
669679 var iterationTokens = await ProcessChatRequest ( chat , model , lastMsg , requestOptions , cancellationToken ) ;
670680
671681 var responseText = string . Concat ( iterationTokens . Select ( x => x . Text ) ) ;
672682
673683 if ( fullResponseBuilder . Length > 0 )
674684 {
675- fullResponseBuilder . Append ( " " ) ;
685+ fullResponseBuilder . Append ( ' \n ' ) ;
676686 }
677687 fullResponseBuilder . Append ( responseText ) ;
678688 tokens . AddRange ( iterationTokens ) ;
@@ -681,6 +691,12 @@ await notificationService.DispatchNotification(
681691
682692 if ( toolCalls == null || ! toolCalls . Any ( ) )
683693 {
694+ requestOptions . InteractiveUpdates = true ;
695+ await SendNotification ( chat . Id , new LLMTokenValue
696+ {
697+ Type = TokenType . FullAnswer ,
698+ Text = responseText
699+ } , false ) ;
684700 break ;
685701 }
686702
@@ -765,19 +781,14 @@ await notificationService.DispatchNotification(
765781
766782 if ( iterations >= MaxToolIterations )
767783 {
784+ await SendNotification ( chat . Id , new LLMTokenValue
785+ {
786+ Type = TokenType . FullAnswer ,
787+ Text = "Maximum tool invocation iterations reached. Ending the conversation."
788+ } , false ) ;
768789 }
769790
770791 var finalResponse = fullResponseBuilder . ToString ( ) ;
771- var finalToken = new LLMTokenValue { Text = finalResponse , Type = TokenType . FullAnswer } ;
772- tokens . Add ( finalToken ) ;
773-
774- if ( requestOptions . InteractiveUpdates )
775- {
776- await notificationService . DispatchNotification (
777- NotificationMessageBuilder . CreateChatCompletion ( chat . Id , finalToken , true ) ,
778- ServiceConstants . Notifications . ReceiveMessageUpdate ) ;
779- }
780-
781792 chat . Messages . Last ( ) . MarkProcessed ( ) ;
782793
783794 return new ChatResult
0 commit comments