Skip to content

Commit 32cdeb6

Browse files
Kr1sekKrystian Moskal
andauthored
add ChatRequestOptions parameter to AskMemory methods across services… (#105)
* add ChatRequestOptions parameter to AskMemory methods across services for enhanced customization. Add implementation to interactive response when using WithFiles method, same as add ChangeOfValue return. * add ChatRequestOptions parameter to AskMemory methods across services for enhanced customization. Add implementation to interactive response when using WithFiles method, same as add ChangeOfValue return. * add missing service implementation * separate ChangeOfValue and Interactive response to make it independent * bump version to 0.7.10 and create release notes --------- Co-authored-by: Krystian Moskal <kmoskal@mobitouch.net>
1 parent c93cc5a commit 32cdeb6

File tree

12 files changed

+194
-16
lines changed

12 files changed

+194
-16
lines changed

Releases/0.7.10.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
# 0.7.10 release
2+
3+
- Fix changeOfValue not returning tokens while using .WithFiles() method.

src/MaIN.Core/.nuspec

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -2,7 +2,7 @@
22
<package>
33
<metadata>
44
<id>MaIN.NET</id>
5-
<version>0.7.9</version>
5+
<version>0.7.10</version>
66
<authors>Wisedev</authors>
77
<owners>Wisedev</owners>
88
<icon>favicon.png</icon>

src/MaIN.Services/Services/Abstract/ILLMService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -32,6 +32,7 @@ public interface ILLMService
3232
/// <returns></returns>
3333
Task<ChatResult?> AskMemory(Chat chat,
3434
ChatMemoryOptions memoryOptions,
35+
ChatRequestOptions requestOptions,
3536
CancellationToken cancellationToken = default);
3637

3738
/// <summary>

src/MaIN.Services/Services/LLMService/AnthropicService.cs

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -71,7 +71,7 @@ private void ValidateApiKey()
7171
if (HasFiles(lastMessage))
7272
{
7373
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
74-
var memoryResult = await AskMemory(chat, result, cancellationToken);
74+
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
7575
resultBuilder.Append(memoryResult!.Message.Content);
7676
lastMessage.MarkProcessed();
7777
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -531,7 +531,7 @@ private List<object> BuildAnthropicMessages(List<ChatMessage> conversation)
531531
return messages;
532532
}
533533

534-
public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, CancellationToken cancellationToken = default)
534+
public async Task<ChatResult?> AskMemory(Chat chat, ChatMemoryOptions memoryOptions, ChatRequestOptions requestOptions, CancellationToken cancellationToken = default)
535535
{
536536
throw new NotSupportedException("Embeddings are not supported by the Anthropic. Document reading requires embedding support.");
537537
}

src/MaIN.Services/Services/LLMService/DeepSeekService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -48,6 +48,7 @@ protected override void ValidateApiKey()
4848
public override async Task<ChatResult?> AskMemory(
4949
Chat chat,
5050
ChatMemoryOptions memoryOptions,
51+
ChatRequestOptions requestOptions,
5152
CancellationToken cancellationToken = default)
5253
{
5354
var lastMsg = chat.Messages.Last();

src/MaIN.Services/Services/LLMService/GeminiService.cs

Lines changed: 61 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
1-
using MaIN.Domain.Configuration;
1+
using System.Text;
2+
using MaIN.Domain.Configuration;
23
using MaIN.Services.Constants;
34
using MaIN.Services.Services.Abstract;
45
using MaIN.Services.Services.LLMService.Memory;
@@ -8,6 +9,7 @@
89
using System.Text.Json;
910
using System.Text.Json.Serialization;
1011
using MaIN.Domain.Entities;
12+
using MaIN.Domain.Models;
1113
using MaIN.Services.Utils;
1214

1315
namespace MaIN.Services.Services.LLMService;
@@ -70,6 +72,7 @@ protected override void ValidateApiKey()
7072
public override async Task<ChatResult?> AskMemory(
7173
Chat chat,
7274
ChatMemoryOptions memoryOptions,
75+
ChatRequestOptions requestOptions,
7376
CancellationToken cancellationToken = default)
7477
{
7578
if (!chat.Messages.Any())
@@ -88,7 +91,63 @@ protected override void ValidateApiKey()
8891
$"{userQuery} | For your next response only, please respond using exactly the following JSON format: \n{jsonGrammar}\n. Do not include any explanations, code blocks, or additional content. After this single JSON response, resume your normal conversational style.";
8992
}
9093

91-
var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
94+
MemoryAnswer retrievedContext;
95+
96+
if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
97+
{
98+
var responseBuilder = new StringBuilder();
99+
100+
var searchOptions = new SearchOptions
101+
{
102+
Stream = true
103+
};
104+
105+
await foreach (var chunk in kernel.AskStreamingAsync(
106+
userQuery,
107+
options: searchOptions,
108+
cancellationToken: cancellationToken))
109+
{
110+
if (!string.IsNullOrEmpty(chunk.Result))
111+
{
112+
responseBuilder.Append(chunk.Result);
113+
114+
var tokenValue = new LLMTokenValue
115+
{
116+
Text = chunk.Result,
117+
Type = TokenType.Message
118+
};
119+
120+
if (requestOptions.InteractiveUpdates)
121+
{
122+
await notificationService.DispatchNotification(
123+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
124+
ServiceConstants.Notifications.ReceiveMessageUpdate);
125+
}
126+
127+
requestOptions.TokenCallback?.Invoke(tokenValue);
128+
}
129+
}
130+
131+
retrievedContext = new MemoryAnswer
132+
{
133+
Question = userQuery,
134+
Result = responseBuilder.ToString(),
135+
NoResult = responseBuilder.Length == 0
136+
};
137+
}
138+
else
139+
{
140+
var searchOptions = new SearchOptions
141+
{
142+
Stream = false
143+
};
144+
145+
retrievedContext = await kernel.AskAsync(
146+
userQuery,
147+
options: searchOptions,
148+
cancellationToken: cancellationToken);
149+
}
150+
92151
chat.Messages.Last().MarkProcessed();
93152
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
94153
return CreateChatResult(chat, retrievedContext.Result, []);

src/MaIN.Services/Services/LLMService/GroqCloudService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ protected override void ValidateApiKey()
4141
public override async Task<ChatResult?> AskMemory(
4242
Chat chat,
4343
ChatMemoryOptions memoryOptions,
44+
ChatRequestOptions requestOptions,
4445
CancellationToken cancellationToken = default)
4546
{
4647
var lastMsg = chat.Messages.Last();

src/MaIN.Services/Services/LLMService/LLMService.cs

Lines changed: 60 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,7 @@ public LLMService(
5858
if (ChatHelper.HasFiles(lastMsg))
5959
{
6060
var memoryOptions = ChatHelper.ExtractMemoryOptions(lastMsg);
61-
return await AskMemory(chat, memoryOptions, cancellationToken);
61+
return await AskMemory(chat, memoryOptions, requestOptions, cancellationToken);
6262
}
6363

6464
var model = KnownModels.GetModel(chat.Model);
@@ -90,6 +90,7 @@ public Task CleanSessionCache(string? id)
9090
public async Task<ChatResult?> AskMemory(
9191
Chat chat,
9292
ChatMemoryOptions memoryOptions,
93+
ChatRequestOptions requestOptions,
9394
CancellationToken cancellationToken = default)
9495
{
9596
var model = KnownModels.GetModel(chat.Model);
@@ -112,9 +113,64 @@ public Task CleanSessionCache(string? id)
112113

113114
await memoryService.ImportDataToMemory((memory.km, memory.generator), memoryOptions, cancellationToken);
114115
var userMessage = chat.Messages.Last();
115-
var result = await memory.km.AskAsync(
116-
userMessage.Content,
117-
cancellationToken: cancellationToken);
116+
117+
MemoryAnswer result;
118+
119+
if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
120+
{
121+
var responseBuilder = new StringBuilder();
122+
123+
var searchOptions = new SearchOptions
124+
{
125+
Stream = true
126+
};
127+
128+
await foreach (var chunk in memory.km.AskStreamingAsync(
129+
userMessage.Content,
130+
options: searchOptions,
131+
cancellationToken: cancellationToken))
132+
{
133+
if (!string.IsNullOrEmpty(chunk.Result))
134+
{
135+
responseBuilder.Append(chunk.Result);
136+
137+
var tokenValue = new LLMTokenValue
138+
{
139+
Text = chunk.Result,
140+
Type = TokenType.Message
141+
};
142+
143+
if (requestOptions.InteractiveUpdates)
144+
{
145+
await notificationService.DispatchNotification(
146+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
147+
ServiceConstants.Notifications.ReceiveMessageUpdate);
148+
}
149+
150+
requestOptions.TokenCallback?.Invoke(tokenValue);
151+
}
152+
}
153+
154+
result = new MemoryAnswer
155+
{
156+
Question = userMessage.Content,
157+
Result = responseBuilder.ToString(),
158+
NoResult = responseBuilder.Length == 0
159+
};
160+
}
161+
else
162+
{
163+
var searchOptions = new SearchOptions
164+
{
165+
Stream = false
166+
};
167+
168+
result = await memory.km.AskAsync(
169+
userMessage.Content,
170+
options: searchOptions,
171+
cancellationToken: cancellationToken);
172+
}
173+
118174
await memory.km.DeleteIndexAsync(cancellationToken: cancellationToken);
119175

120176
if (disableCache)

src/MaIN.Services/Services/LLMService/OpenAiCompatibleService.cs

Lines changed: 58 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -68,7 +68,7 @@ public abstract class OpenAiCompatibleService(
6868
if (HasFiles(lastMessage))
6969
{
7070
var result = ChatHelper.ExtractMemoryOptions(lastMessage);
71-
var memoryResult = await AskMemory(chat, result, cancellationToken);
71+
var memoryResult = await AskMemory(chat, result, options, cancellationToken);
7272
resultBuilder.Append(memoryResult!.Message.Content);
7373
lastMessage.MarkProcessed();
7474
UpdateSessionCache(chat.Id, resultBuilder.ToString(), options.CreateSession);
@@ -438,6 +438,7 @@ await _notificationService.DispatchNotification(
438438
public virtual async Task<ChatResult?> AskMemory(
439439
Chat chat,
440440
ChatMemoryOptions memoryOptions,
441+
ChatRequestOptions requestOptions,
441442
CancellationToken cancellationToken = default)
442443
{
443444
if (!chat.Messages.Any())
@@ -455,8 +456,63 @@ await _notificationService.DispatchNotification(
455456
userQuery = $"{userQuery} | Respond only using the following JSON format: \n{jsonGrammar}\n. Do not add explanations, code tags, or any extra content.";
456457
}
457458

458-
var retrievedContext = await kernel.AskAsync(userQuery, cancellationToken: cancellationToken);
459+
MemoryAnswer retrievedContext;
459460

461+
if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)
462+
{
463+
var responseBuilder = new StringBuilder();
464+
465+
var searchOptions = new SearchOptions
466+
{
467+
Stream = true
468+
};
469+
470+
await foreach (var chunk in kernel.AskStreamingAsync(
471+
userQuery,
472+
options: searchOptions,
473+
cancellationToken: cancellationToken))
474+
{
475+
if (!string.IsNullOrEmpty(chunk.Result))
476+
{
477+
responseBuilder.Append(chunk.Result);
478+
479+
var tokenValue = new LLMTokenValue
480+
{
481+
Text = chunk.Result,
482+
Type = TokenType.Message
483+
};
484+
485+
if (requestOptions.InteractiveUpdates)
486+
{
487+
await notificationService.DispatchNotification(
488+
NotificationMessageBuilder.CreateChatCompletion(chat.Id, tokenValue, false),
489+
ServiceConstants.Notifications.ReceiveMessageUpdate);
490+
}
491+
492+
requestOptions.TokenCallback?.Invoke(tokenValue);
493+
}
494+
}
495+
496+
retrievedContext = new MemoryAnswer
497+
{
498+
Question = userQuery,
499+
Result = responseBuilder.ToString(),
500+
NoResult = responseBuilder.Length == 0
501+
};
502+
}
503+
else
504+
{
505+
var searchOptions = new SearchOptions
506+
{
507+
Stream = false
508+
};
509+
510+
retrievedContext = await kernel.AskAsync(
511+
userQuery,
512+
options: searchOptions,
513+
cancellationToken: cancellationToken);
514+
}
515+
460516
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
461517
return CreateChatResult(chat, retrievedContext.Result, []);
462518
}

src/MaIN.Services/Services/LLMService/XaiService.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -41,6 +41,7 @@ protected override void ValidateApiKey()
4141
public override async Task<ChatResult?> AskMemory(
4242
Chat chat,
4343
ChatMemoryOptions memoryOptions,
44+
ChatRequestOptions requestOptions,
4445
CancellationToken cancellationToken = default)
4546
{
4647
var lastMsg = chat.Messages.Last();

0 commit comments

Comments
 (0)