Skip to content

Commit 416b0f0

Browse files
author
Krystian Moskal
committed
add ChatRequestOptions parameter to AskMemory methods across services for enhanced customization. Add an interactive-response implementation when using the WithFiles method, and likewise add a ChangeOfValue return.
1 parent 4adaf07 commit 416b0f0

9 files changed

Lines changed: 180 additions & 15 deletions

File tree

src/MaIN.Services/Services/Abstract/ILLMService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ public interface ILLMService
3232
/// <returns></returns>
3333
Task<ChatResult?> AskMemory(Chat chat,
3434
ChatMemoryOptions memoryOptions,
35+
ChatRequestOptions requestOptions,
3536
CancellationToken cancellationToken = default);
3637

3738
/// <summary>

src/MaIN.Services/Services/LLMService/AnthropicService.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ private void ValidateApiKey()
7171
if (HasFiles(lastMessage))
7272
{
7373
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
74-
var memoryResult = await AskMemory(chat, result, cancellationToken);
74+
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
7575
resultBuilder.Append(memoryResult!.Message.Content);
7676
lastMessage.MarkProcessed();
7777
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -531,7 +531,7 @@ private List<object> BuildAnthropicMessages(List<ChatMessage> conversation)
531531
return messages;
532532
}
533533

534-
public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, CancellationToken cancellationToken = default)
534+
public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, ChatRequestOptions requestOptions, CancellationToken cancellationToken = default)
535535
{
536536
throw new NotSupportedException("Embeddings are not supported by the Anthropic. Document reading requires embedding support.");
537537
}

src/MaIN.Services/Services/LLMService/DeepSeekService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ protected override void ValidateApiKey()
4848
public override async Task<ChatResult?> AskMemory(
4949
Chat chat,
5050
ChatMemoryOptions memoryOptions,
51+
ChatRequestOptions requestOptions,
5152
CancellationToken cancellationToken = default)
5253
{
5354
var lastMsg = chat.Messages.Last();

src/MaIN.Services/Services/LLMService/GeminiService.cs

Lines changed: 58 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
using MaIN.Domain.Configuration;
1+
using System.Text;
2+
using MaIN.Domain.Configuration;
23
using MaIN.Services.Constants;
34
using MaIN.Services.Services.Abstract;
45
using MaIN.Services.Services.LLMService.Memory;
@@ -8,6 +9,7 @@
89
using System.Text.Json;
910
using System.Text.Json.Serialization;
1011
using MaIN.Domain.Entities;
12+
using MaIN.Domain.Models;
1113
using MaIN.Services.Utils;
1214

1315
namespace MaIN.Services.Services.LLMService;
@@ -70,6 +72,7 @@ protected override void ValidateApiKey()
7072
public override async Task<ChatResult?> AskMemory(
7173
Chat chat,
7274
ChatMemoryOptions memoryOptions,
75+
ChatRequestOptions requestOptions,
7376
CancellationToken cancellationToken = default)
7477
{
7578
if (!chat.Messages.Any())
@@ -88,7 +91,60 @@ protected override void ValidateApiKey()
8891
$"{userQuery} | For your next response only, please respond using exactly the following JSON format: \n{jsonGrammar}\n. Do not include any explanations, code blocks, or additional content. After this single JSON response, resume your normal conversational style.";
8992
}
9093

91-
var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
94+
MemoryAnswer retrievedContext;
95+
96+
if (requestOptions.InteractiveUpdates)
97+
{
98+
var responseBuilder = new StringBuilder();
99+
100+
var searchOptions = new SearchOptions
101+
{
102+
Stream = true
103+
};
104+
105+
await foreach (var chunk in kernel.AskStreamingAsync(
106+
userQuery,
107+
options: searchOptions,
108+
cancellationToken: cancellationToken))
109+
{
110+
if (!string.IsNullOrEmpty(chunk.Result))
111+
{
112+
responseBuilder.Append(chunk.Result);
113+
114+
var tokenValue = new LLMTokenValue
115+
{
116+
Text = chunk.Result,
117+
Type = TokenType.Message
118+
};
119+
120+
await notificationService.DispatchNotification(
121+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
122+
ServiceConstants.Notifications.ReceiveMessageUpdate);
123+
124+
requestOptions.TokenCallback?.Invoke(tokenValue);
125+
}
126+
}
127+
128+
retrievedContext = new MemoryAnswer
129+
{
130+
Question = userQuery,
131+
Result = responseBuilder.ToString(),
132+
NoResult = responseBuilder.Length == 0
133+
};
134+
}
135+
else
136+
{
137+
var searchOptions = new SearchOptions
138+
{
139+
Stream = false
140+
};
141+
142+
retrievedContext = await kernel.AskAsync(
143+
userQuery,
144+
options: searchOptions,
145+
cancellationToken: cancellationToken);
146+
}
147+
92148
chat.Messages.Last().MarkProcessed();
93149
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
94150
return CreateChatResult(chat, retrievedContext.Result, []);

src/MaIN.Services/Services/LLMService/GroqCloudService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -42,6 +42,7 @@ protected override void ValidateApiKey()
4242
public override async Task<ChatResult?> AskMemory(
4343
Chat chat,
4444
ChatMemoryOptions memoryOptions,
45+
ChatRequestOptions requestOptions,
4546
CancellationToken cancellationToken = default)
4647
{
4748
var lastMsg = chat.Messages.Last();

src/MaIN.Services/Services/LLMService/LLMService.cs

Lines changed: 57 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ public LLMService(
5858
if (ChatHelper.HasFiles(lastMsg))
5959
{
6060
var memoryOptions = ChatHelper.ExtractMemoryOptions(lastMsg);
61-
return await AskMemory(chat, memoryOptions, cancellationToken);
61+
return await AskMemory(chat, memoryOptions, requestOptions, cancellationToken);
6262
}
6363

6464
var model = KnownModels.GetModel(chat.Model);
@@ -90,6 +90,7 @@ public Task CleanSessionCache(string? id)
9090
public async Task<ChatResult?> AskMemory(
9191
Chat chat,
9292
ChatMemoryOptions memoryOptions,
93+
ChatRequestOptions requestOptions,
9394
CancellationToken cancellationToken = default)
9495
{
9596
var model = KnownModels.GetModel(chat.Model);
@@ -112,9 +113,61 @@ public Task CleanSessionCache(string? id)
112113

113114
await memoryService.ImportDataToMemory((memory.km, memory.generator), memoryOptions, cancellationToken);
114115
var userMessage = chat.Messages.Last();
115-
var result = await memory.km.AskAsync(
116-
userMessage.Content,
117-
cancellationToken: cancellationToken);
116+
117+
MemoryAnswer result;
118+
119+
if (requestOptions.InteractiveUpdates)
120+
{
121+
var responseBuilder = new StringBuilder();
122+
123+
var searchOptions = new SearchOptions
124+
{
125+
Stream = true
126+
};
127+
128+
await foreach (var chunk in memory.km.AskStreamingAsync(
129+
userMessage.Content,
130+
options: searchOptions,
131+
cancellationToken: cancellationToken))
132+
{
133+
if (!string.IsNullOrEmpty(chunk.Result))
134+
{
135+
responseBuilder.Append(chunk.Result);
136+
137+
var tokenValue = new LLMTokenValue
138+
{
139+
Text = chunk.Result,
140+
Type = TokenType.Message
141+
};
142+
143+
await notificationService.DispatchNotification(
144+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
145+
ServiceConstants.Notifications.ReceiveMessageUpdate);
146+
147+
requestOptions.TokenCallback?.Invoke(tokenValue);
148+
}
149+
}
150+
151+
result = new MemoryAnswer
152+
{
153+
Question = userMessage.Content,
154+
Result = responseBuilder.ToString(),
155+
NoResult = responseBuilder.Length == 0
156+
};
157+
}
158+
else
159+
{
160+
var searchOptions = new SearchOptions
161+
{
162+
Stream = false
163+
};
164+
165+
result = await memory.km.AskAsync(
166+
userMessage.Content,
167+
options: searchOptions,
168+
cancellationToken: cancellationToken);
169+
}
170+
118171
await memory.km.DeleteIndexAsync(cancellationToken: cancellationToken);
119172

120173
if (disableCache)

src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs

Lines changed: 55 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ public abstract class OpenAiCompatibleService(
6868
if (HasFiles(lastMessage))
6969
{
7070
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
71-
var memoryResult = await AskMemory(chat, result, cancellationToken);
71+
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
7272
resultBuilder.Append(memoryResult!.Message.Content);
7373
lastMessage.MarkProcessed();
7474
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -438,6 +438,7 @@ await _notificationService.DispatchNotification(
438438
public virtual async Task<ChatResult?> AskMemory(
439439
Chat chat,
440440
ChatMemoryOptions memoryOptions,
441+
ChatRequestOptions requestOptions,
441442
CancellationToken cancellationToken = default)
442443
{
443444
if (!chat.Messages.Any())
@@ -455,8 +456,60 @@ await _notificationService.DispatchNotification(
455456
userQuery = $"{userQuery} | Respond only using the following JSON format: \n{jsonGrammar}\n. Do not add explanations, code tags, or any extra content.";
456457
}
457458

458-
var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
459+
MemoryAnswer retrievedContext;
459460

461+
if (requestOptions.InteractiveUpdates)
462+
{
463+
var responseBuilder = new StringBuilder();
464+
465+
var searchOptions = new SearchOptions
466+
{
467+
Stream = true
468+
};
469+
470+
await foreach (var chunk in kernel.AskStreamingAsync(
471+
userQuery,
472+
options: searchOptions,
473+
cancellationToken: cancellationToken))
474+
{
475+
if (!string.IsNullOrEmpty(chunk.Result))
476+
{
477+
responseBuilder.Append(chunk.Result);
478+
479+
var tokenValue = new LLMTokenValue
480+
{
481+
Text = chunk.Result,
482+
Type = TokenType.Message
483+
};
484+
485+
await notificationService.DispatchNotification(
486+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
487+
ServiceConstants.Notifications.ReceiveMessageUpdate);
488+
489+
requestOptions.TokenCallback?.Invoke(tokenValue);
490+
}
491+
}
492+
493+
retrievedContext = new MemoryAnswer
494+
{
495+
Question = userQuery,
496+
Result = responseBuilder.ToString(),
497+
NoResult = responseBuilder.Length == 0
498+
};
499+
}
500+
else
501+
{
502+
var searchOptions = new SearchOptions
503+
{
504+
Stream = false
505+
};
506+
507+
retrievedContext = await kernel.AskAsync(
508+
userQuery,
509+
options: searchOptions,
510+
cancellationToken: cancellationToken);
511+
}
512+
460513
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
461514
return CreateChatResult(chat, retrievedContext.Result, []);
462515
}

src/MaIN.Services/Services/Steps/Commands/AnswerCommandHandler.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,7 +36,7 @@ public class AnswerCommandHandler(
3636
{
3737
case KnowledgeUsage.UseMemory:
3838
result = await llmService.AskMemory(command.Chat,
39-
new ChatMemoryOptions { Memory = command.Chat.Memory });
39+
new ChatMemoryOptions { Memory = command.Chat.Memory }, new ChatRequestOptions());
4040
return result!.Message;
4141
case KnowledgeUsage.UseKnowledge:
4242
var isKnowledgeNeeded = await ShouldUseKnowledge(command.Knowledge, command.Chat);
@@ -138,7 +138,7 @@ await notificationService.DispatchNotification(NotificationMessageBuilder.Create
138138
return result.Message;
139139
}
140140

141-
var knowledgeResult = await llmService.AskMemory(chat, memoryOptions);
141+
var knowledgeResult = await llmService.AskMemory(chat, memoryOptions, new ChatRequestOptions());
142142
chat.Messages.Last().Content = originalContent;
143143
return knowledgeResult?.Message;
144144
}

src/MaIN.Services/Services/Steps/Commands/FetchCommandHandler.cs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ private async Task<Message> HandleFileSource(FetchCommand command, Dictionary<st
9494
{
9595
FilesData = filesDictionary,
9696
PreProcess = fileData.PreProcess
97-
}
97+
}, new ChatRequestOptions()
9898
);
9999

100100
return result!.Message;
@@ -112,7 +112,7 @@ private async Task<Message> HandleWebSource(FetchCommand command, Dictionary<str
112112
{
113113
var memoryChat = command.MemoryChat;
114114
var result = await llmServiceFactory.CreateService(command.Chat.Backend ?? settings.BackendType)
115-
.AskMemory(memoryChat!, new ChatMemoryOptions { WebUrls = [webData!.Url] });
115+
.AskMemory(memoryChat!, new ChatMemoryOptions { WebUrls = [webData!.Url] }, new ChatRequestOptions());
116116
result!.Message.Role = command.ResponseType == FetchResponseType.AS_System ? "System" : "Assistant";
117117
return result!.Message;
118118
}
@@ -131,7 +131,7 @@ private async Task<Message> ProcessJsonResponse(Message response, FetchCommand c
131131
var result = await llmServiceFactory.CreateService(command.Chat.Backend ?? settings.BackendType).AskMemory(command.MemoryChat!, new ChatMemoryOptions
132132
{
133133
TextData = chunks
134-
});
134+
}, new ChatRequestOptions());
135135

136136
result!.Message.Role = command.ResponseType == FetchResponseType.AS_System ? "System" : "Assistant";
137137
var newMessage = result!.Message;

0 commit comments

Comments
 (0)