Skip to content

Commit 2245b7c

Browse files
committed
Fix: Preserve images with file attachments across providers
Detect when the latest message contains images and, instead of clearing them, run a memory/kernel search to build contextual text for the LLM. GeminiService and LLMService now: perform SearchAsync, delete the temporary index, aggregate citation text into a context block, inject that context into the message content (nulling Files), call Send, then restore the original message content and return the result. LLMService also performs additional model/resource cleanup when disableCache is true (dispose models/generator and embedder weights, remove model from loader). DeepSeekService and OllamaService no longer null out Images on the message so image data is preserved for the new image-handling flow.
1 parent fae3ed5 commit 2245b7c

File tree

4 files changed

+52
-3
lines changed

4 files changed

+52
-3
lines changed

src/MaIN.Services/Services/LLMService/DeepSeekService.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -67,7 +67,6 @@ protected override void ValidateApiKey()
6767

6868
chat.Messages.Last().Content = message.Content;
6969
chat.Messages.Last().Files = [];
70-
chat.Messages.Last().Images = null;
7170
var result = await Send(chat, requestOptions, cancellationToken);
7271
chat.Messages.Last().Content = lastMsg.Content;
7372
return result;

src/MaIN.Services/Services/LLMService/GeminiService.cs

Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -98,6 +98,27 @@ protected override void ValidateApiKey()
9898
$"{userQuery} | For your next response only, please respond using exactly the following JSON format: \n{jsonGrammar}\n. Do not include any explanations, code blocks, or additional content. After this single JSON response, resume your normal conversational style.";
9999
}
100100

101+
var lastMessage = chat.Messages.Last();
102+
if (lastMessage.Images?.Count > 0)
103+
{
104+
var searchResult = await kernel.SearchAsync(userQuery, cancellationToken: cancellationToken);
105+
await kernel.DeleteIndexAsync(cancellationToken: cancellationToken);
106+
107+
var ctxBuilder = new StringBuilder();
108+
foreach (var citation in searchResult.Results.SelectMany(r => r.Partitions))
109+
ctxBuilder.AppendLine(citation.Text);
110+
111+
var originalContent = lastMessage.Content;
112+
if (ctxBuilder.Length > 0)
113+
lastMessage.Content =
114+
$"Use the following context to answer the question:\n\n{ctxBuilder}\n\nQuestion: {originalContent}";
115+
lastMessage.Files = null;
116+
117+
var result = await Send(chat, requestOptions, cancellationToken);
118+
lastMessage.Content = originalContent;
119+
return result;
120+
}
121+
101122
MemoryAnswer retrievedContext;
102123

103124
if (requestOptions.InteractiveUpdates || requestOptions.TokenCallback != null)

src/MaIN.Services/Services/LLMService/LLMService.cs

Lines changed: 31 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -129,7 +129,37 @@ public Task CleanSessionCache(string? id)
129129

130130
await memoryService.ImportDataToMemory((km, generator), memoryOptions, cancellationToken);
131131
var userMessage = chat.Messages.Last();
132-
132+
133+
if (userMessage.Images?.Count > 0)
134+
{
135+
var searchResult = await km.SearchAsync(userMessage.Content, cancellationToken: cancellationToken);
136+
await km.DeleteIndexAsync(cancellationToken: cancellationToken);
137+
138+
if (disableCache)
139+
{
140+
llmModel.Dispose();
141+
ModelLoader.RemoveModel(model.FileName);
142+
textGenerator.Dispose();
143+
}
144+
generator._embedder.Dispose();
145+
generator._embedder._weights.Dispose();
146+
generator.Dispose();
147+
148+
var ctxBuilder = new StringBuilder();
149+
foreach (var citation in searchResult.Results.SelectMany(r => r.Partitions))
150+
ctxBuilder.AppendLine(citation.Text);
151+
152+
var originalContent = userMessage.Content;
153+
if (ctxBuilder.Length > 0)
154+
userMessage.Content =
155+
$"Use the following context to answer the question:\n\n{ctxBuilder}\n\nQuestion: {originalContent}";
156+
userMessage.Files = null;
157+
158+
var chatResult = await Send(chat, requestOptions, cancellationToken);
159+
userMessage.Content = originalContent;
160+
return chatResult;
161+
}
162+
133163
MemoryAnswer result;
134164

135165
var tokens = new List<LLMTokenValue>();

src/MaIN.Services/Services/LLMService/OllamaService.cs

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -58,7 +58,6 @@ protected override void ValidateApiKey()
5858

5959
chat.Messages.Last().Content = message.Content;
6060
chat.Messages.Last().Files = [];
61-
chat.Messages.Last().Images = null;
6261
var result = await Send(chat, requestOptions, cancellationToken);
6362
chat.Messages.Last().Content = lastMsg.Content;
6463
return result;

0 commit comments

Comments (0)