Skip to content

Commit b93669e

Browse files
committed
Add MCP for vertex
Add support for Vertex AI Gemini chat completions and expose location on Mcp. Mcp.cs: introduce a new Location property with default "us-central1". McpService.cs: refactor InitializeChatCompletions to accept the Mcp config (and derive backend and model from it), add a using for LLMService.Auth, and wire up a Vertex backend case that creates a Google service-account token provider, builds a bearer token delegate, normalizes the model name, and registers the VertexAIGemini chat completion with the kernel using config.Location and the project ID. Also adjust the promptSettings call site to pass the config. Includes an auth presence check that throws if Vertex credentials are not configured.
1 parent 217732e commit b93669e

2 files changed

Lines changed: 28 additions & 5 deletions

File tree

src/MaIN.Domain/Entities/Mcp.cs

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ public class Mcp
88
public required List<string> Arguments { get; init; }
99
public required string Command { get; init; }
1010
public required string Model { get; init; }
11+
public string Location { get; set; } = "us-central1";
1112
public Dictionary<string, string> Properties { get; set; } = [];
1213
public BackendType? Backend { get; set; }
1314
public Dictionary<string, string> EnvironmentVariables { get; set; } = [];

src/MaIN.Services/Services/McpService.cs

Lines changed: 27 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,6 +2,7 @@
22
using MaIN.Domain.Entities;
33
using MaIN.Domain.Models.Concrete;
44
using MaIN.Services.Services.Abstract;
5+
using MaIN.Services.Services.LLMService.Auth;
56
using MaIN.Services.Services.LLMService.Utils;
67
using MaIN.Services.Services.Models;
78
using Microsoft.SemanticKernel;
@@ -30,7 +31,7 @@ public async Task<McpResult> Prompt(Mcp config, List<Message> messageHistory)
3031
);
3132

3233
var builder = Kernel.CreateBuilder();
33-
var promptSettings = InitializeChatCompletions(builder, config.Backend ?? settings.BackendType, config.Model);
34+
var promptSettings = InitializeChatCompletions(builder, config);
3435
var kernel = builder.Build();
3536
var tools = await mcpClient.ListToolsAsync();
3637
kernel.Plugins.AddFromFunctions("Tools", tools.Select(x => x.AsKernelFunction()));
@@ -49,10 +50,10 @@ public async Task<McpResult> Prompt(Mcp config, List<Message> messageHistory)
4950
}
5051

5152
var chatService = kernel.GetRequiredService<IChatCompletionService>();
52-
53+
5354
var result = await chatService.GetChatMessageContentsAsync(
54-
chatHistory,
55-
promptSettings,
55+
chatHistory,
56+
promptSettings,
5657
kernel);
5758

5859
return new McpResult
@@ -68,8 +69,11 @@ public async Task<McpResult> Prompt(Mcp config, List<Message> messageHistory)
6869
};
6970
}
7071

71-
private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelBuilder, BackendType backendType, string model)
72+
private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelBuilder, Mcp config)
7273
{
74+
var backendType = config.Backend ?? settings.BackendType;
75+
var model = config.Model;
76+
7377
switch (backendType)
7478
{
7579
case BackendType.OpenAi:
@@ -118,6 +122,24 @@ private PromptExecutionSettings InitializeChatCompletions(IKernelBuilder kernelB
118122
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true })
119123
};
120124

125+
case BackendType.Vertex:
126+
var auth = settings.GoogleServiceAccountAuth
127+
?? throw new InvalidOperationException("Vertex AI service account is not configured.");
128+
var tokenProvider = new GoogleServiceAccountTokenProvider(auth);
129+
var httpClient = new HttpClient();
130+
Func<ValueTask<string>> bearerTokenProvider = async ()
131+
=> await tokenProvider.GetAccessTokenAsync(httpClient);
132+
133+
var modelName = model.StartsWith("google/", StringComparison.OrdinalIgnoreCase)
134+
? model["google/".Length..]
135+
: model;
136+
137+
kernelBuilder.Services.AddVertexAIGeminiChatCompletion(modelName, bearerTokenProvider, config.Location, auth.ProjectId);
138+
return new GeminiPromptExecutionSettings
139+
{
140+
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(options: new() { RetainArgumentTypes = true })
141+
};
142+
121143
case BackendType.Ollama:
122144
throw new NotSupportedException("Ollama models do not support MCP integration.");
123145

0 commit comments

Comments
 (0)