11using MaIN . Domain . Configuration ;
22using MaIN . Domain . Entities ;
33using MaIN . Domain . Entities . Agents . Knowledge ;
4+ using MaIN . Domain . Exceptions . Agents ;
45using MaIN . Domain . Models ;
56using MaIN . Domain . Models . Abstract ;
67using MaIN . Services . Constants ;
@@ -19,8 +20,7 @@ public class AnswerCommandHandler(
1920 ILLMServiceFactory llmServiceFactory ,
2021 IMcpService mcpService ,
2122 INotificationService notificationService ,
22- IImageGenServiceFactory imageGenServiceFactory ,
23- MaINSettings settings )
23+ IImageGenServiceFactory imageGenServiceFactory )
2424 : ICommandHandler < AnswerCommand , Message ? >
2525{
2626 private static readonly JsonSerializerOptions _jsonOptions = new ( )
@@ -30,10 +30,13 @@ public class AnswerCommandHandler(
3030
3131 public async Task < Message ? > HandleAsync ( AnswerCommand command )
3232 {
33+ if ( ! ModelRegistry . TryGetById ( command . Chat . ModelId , out var model ) )
34+ {
35+ throw new AgentModelNotAvailableException ( command . AgentId , command . Chat . ModelId ) ;
36+ }
37+
3338 ChatResult ? result ;
34- var backend = ModelRegistry . TryGetById ( command . Chat . ModelId , out var resolvedModel )
35- ? resolvedModel ! . Backend
36- : settings . BackendType ;
39+ var backend = model ! . Backend ;
3740 var llmService = llmServiceFactory . CreateService ( backend ) ;
3841 var imageGenService = imageGenServiceFactory . CreateService ( backend ) ;
3942
@@ -44,15 +47,15 @@ public class AnswerCommandHandler(
4447 new ChatMemoryOptions { Memory = command . Chat . Memory } , new ChatRequestOptions ( ) ) ;
4548 return result ! . Message ;
4649 case KnowledgeUsage . UseKnowledge :
47- var isKnowledgeNeeded = await ShouldUseKnowledge ( command . Knowledge , command . Chat ) ;
50+ var isKnowledgeNeeded = await ShouldUseKnowledge ( command . Knowledge , command . Chat , backend ) ;
4851 if ( isKnowledgeNeeded )
4952 {
50- return await ProcessKnowledgeQuery ( command . Knowledge , command . Chat , command . AgentId ) ;
53+ return await ProcessKnowledgeQuery ( command . Knowledge , command . Chat , command . AgentId , llmService ) ;
5154 }
5255
5356 break ;
5457 case KnowledgeUsage . AlwaysUseKnowledge :
55- return await ProcessKnowledgeQuery ( command . Knowledge , command . Chat , command . AgentId ) ;
58+ return await ProcessKnowledgeQuery ( command . Knowledge , command . Chat , command . AgentId , llmService ) ;
5659 }
5760
5861 result = command . Chat . ImageGen
@@ -68,7 +71,7 @@ public class AnswerCommandHandler(
6871 return result ! . Message ;
6972 }
7073
71- private async Task < bool > ShouldUseKnowledge ( Knowledge ? knowledge , Chat chat )
74+ private async Task < bool > ShouldUseKnowledge ( Knowledge ? knowledge , Chat chat , BackendType backend )
7275 {
7376 var originalContent = chat . Messages . Last ( ) . Content ;
7477
@@ -87,9 +90,6 @@ private async Task<bool> ShouldUseKnowledge(Knowledge? knowledge, Chat chat)
8790 Content of available knowledge has source tags. Prompt: { originalContent }
8891 """ ;
8992
90- var backend = ModelRegistry . TryGetById ( chat . ModelId , out var resolvedModel )
91- ? resolvedModel ! . Backend
92- : settings . BackendType ;
9393 var service = llmServiceFactory . CreateService ( backend ) ;
9494
9595 var result = await service . Send ( chat , new ChatRequestOptions ( )
@@ -104,7 +104,7 @@ private async Task<bool> ShouldUseKnowledge(Knowledge? knowledge, Chat chat)
104104 return shouldUseKnowledge ;
105105 }
106106
107- private async Task < Message ? > ProcessKnowledgeQuery ( Knowledge ? knowledge , Chat chat , string agentId )
107+ private async Task < Message ? > ProcessKnowledgeQuery ( Knowledge ? knowledge , Chat chat , string agentId , ILLMService llmService )
108108 {
109109 var originalContent = chat . Messages . Last ( ) . Content ;
110110 var indexAsKnowledge = knowledge ? . Index . Items . ToDictionary ( x => x . Name , x => x . Tags ) ;
@@ -116,15 +116,10 @@ private async Task<bool> ShouldUseKnowledge(Knowledge? knowledge, Chat chat)
116116 KNOWLEDGE:
117117 { index }
118118
119- Find tags that fits user query based on available knowledge (provided to you above as pair of item names with tags).
119+ Find tags that fits user query based on available knowledge (provided to you above as pair of item names with tags).
120120 Always return at least 1 tag in array, and no more than 4. Prompt: { originalContent }
121121 """ ;
122122
123- var backend = ModelRegistry . TryGetById ( chat . ModelId , out var resolvedModel )
124- ? resolvedModel ! . Backend
125- : settings . BackendType ;
126- var llmService = llmServiceFactory . CreateService ( backend ) ;
127-
128123 var searchResult = await llmService . Send ( chat , new ChatRequestOptions ( )
129124 {
130125 SaveConv = false
@@ -134,7 +129,7 @@ Find tags that fits user query based on available knowledge (provided to you abo
134129 . Where ( x => x . Tags . Intersect ( matchedTags ! ) . Any ( ) || matchedTags ! . Contains ( x . Name ) )
135130 . ToList ( ) ;
136131
137- //NOTE: perhaps good idea for future to combine knowledge from MCP and from KM
132+ //NOTE: perhaps good idea for future to combine knowledge from MCP and from KM
138133 var memoryOptions = new ChatMemoryOptions ( ) ;
139134 var mcpConfig = BuildMemoryOptionsFromKnowledgeItems ( knowledgeItems , memoryOptions ) ;
140135
0 commit comments