-
Notifications
You must be signed in to change notification settings - Fork 1.7k
Expand file tree
/
Copy pathProgram.cs
More file actions
281 lines (243 loc) · 11.1 KB
/
Program.cs
File metadata and controls
281 lines (243 loc) · 11.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
// Copyright (c) Microsoft. All rights reserved.
// This sample demonstrates how to dynamically expand the set of function tools available to an
// agent during a function-calling loop. The agent starts with a single "RequestTools" function.
// When the model calls RequestTools with a description of the capabilities needed, the function
// uses the ambient FunctionInvocationContext to add new tools to ChatOptions.Tools. The agent
// can then use the newly added tools in subsequent iterations of the same function-calling loop.
using System.ComponentModel;
using Azure.AI.OpenAI;
using Azure.Identity;
using Microsoft.Agents.AI;
using Microsoft.Extensions.AI;
// Required endpoint: fail fast with a clear message when it is not configured.
string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT")
    ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set.");

// Deployment name is optional; fall back to a default model deployment.
string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME") ?? "gpt-5.4-mini";
// Pre-defined tool implementations that can be loaded on demand.
[Description("Get the current weather for a city.")]
static string GetWeather([Description("The city name.")] string city)
{
    // Uppercase once so the lookup is case-insensitive.
    var key = city.ToUpperInvariant();
    if (key == "SEATTLE")
    {
        return "Seattle: 55°F, cloudy with light rain.";
    }
    if (key == "NEW YORK")
    {
        return "New York: 72°F, sunny and warm.";
    }
    if (key == "LONDON")
    {
        return "London: 48°F, overcast with fog.";
    }

    // Unknown city: steer the model toward the supported names.
    return $"{city}: weather data not available, please provide one of the following city names: 'Seattle', 'New York', 'London'.";
}
[Description("Get the current local time for a city.")]
static string GetTime([Description("The city name.")] string city)
{
    // Case-insensitive match against a small fixed set of supported cities.
    switch (city.ToUpperInvariant())
    {
        case "SEATTLE":
            return "Seattle: 9:00 AM PST";
        case "NEW YORK":
            return "New York: 12:00 PM EST";
        case "LONDON":
            return "London: 5:00 PM GMT";
        default:
            // Unknown city: tell the model which names are supported.
            return $"{city}: time data not available, please provide one of the following city names: 'Seattle', 'New York', 'London'.";
    }
}
[Description("Convert a temperature from Fahrenheit to Celsius.")]
static string ConvertFahrenheitToCelsius([Description("The temperature in Fahrenheit.")] double fahrenheit)
{
    // Standard F -> C conversion; rendered with one decimal place.
    double celsius = (fahrenheit - 32) * 5 / 9;
    return $"{fahrenheit}°F = {celsius:F1}°C";
}
// A registry of tool sets that can be loaded by description keyword.
// Keys are compared case-insensitively so model-supplied descriptions match loosely.
Dictionary<string, List<AITool>> toolCatalog = new(StringComparer.OrdinalIgnoreCase);
toolCatalog["weather"] = [AIFunctionFactory.Create(GetWeather, name: "GetWeather")];
toolCatalog["time"] = [AIFunctionFactory.Create(GetTime, name: "GetTime")];
toolCatalog["temperature"] = [AIFunctionFactory.Create(ConvertFahrenheitToCelsius, name: "ConvertFahrenheitToCelsius")];
// The RequestTools function uses the ambient FunctionInvocationContext to add tools dynamically.
// When the model describes a capability it needs, matching tool sets from the catalog are
// appended to ChatOptions.Tools so they become callable in the next loop iteration.
AIFunction requestToolsFunction = AIFunctionFactory.Create(
    [Description("Request additional tools to be loaded based on a description of the functionality needed. " +
        "Call this when you need capabilities that are not yet available in your current tool set.")] (
        [Description("A description of the functionality required, e.g. 'weather', 'time', or 'temperature conversion'.")] string description
    ) =>
    {
        // Access the ambient FunctionInvocationContext provided by FunctionInvokingChatClient.
        // It is only available while this function runs inside the function-calling loop.
        var context = FunctionInvokingChatClient.CurrentContext
            ?? throw new InvalidOperationException("No ambient FunctionInvocationContext available.");

        var tools = context.Options?.Tools;
        if (tools is null)
        {
            return "Unable to register new tools: ChatOptions.Tools is not available.";
        }

        // Find matching tool sets from the catalog; the catalog's comparer makes the
        // keyword match case-insensitive.
        List<string> addedToolNames = [];
        foreach (var (keyword, catalogTools) in toolCatalog)
        {
            if (!description.Contains(keyword, StringComparison.OrdinalIgnoreCase))
            {
                continue;
            }

            foreach (var tool in catalogTools)
            {
                // Avoid adding duplicates across repeated RequestTools calls.
                if (tool is AIFunction fn && !tools.Any(t => t is AIFunction existing && existing.Name == fn.Name))
                {
                    tools.Add(tool);
                    addedToolNames.Add(fn.Name);
                }
            }
        }

        // Report the specific tools that were added so the model knows exactly
        // which new functions it can now call (the names were previously collected
        // but never surfaced in the success message).
        return addedToolNames.Count > 0
            ? $"Successfully loaded tools: {string.Join(", ", addedToolNames)}."
            : $"No tools matched the description '{description}'. Available categories: {string.Join(", ", toolCatalog.Keys)}.";
    },
    name: "RequestTools");
// Create the agent with only the RequestTools function initially.
// Chat client middleware logs the tools available on each LLM call, making the
// dynamic expansion visible in the console output.
// WARNING: DefaultAzureCredential is convenient for development but requires careful consideration in production.
// In production, consider using a specific credential (e.g., ManagedIdentityCredential) to avoid
// latency issues, unintended credential probing, and potential security risks from fallback mechanisms.
var azureClient = new AzureOpenAIClient(new Uri(endpoint), new DefaultAzureCredential());
var chatClient = azureClient
    .GetChatClient(deploymentName)
    .AsIChatClient();

AIAgent agent = chatClient
    .AsBuilder()
    .Use(getResponseFunc: ToolLoggingMiddleware, getStreamingResponseFunc: ToolLoggingStreamingMiddleware)
    .BuildAIAgent(
        instructions: """
            You are a helpful assistant. You start with limited tools.
            When you need functionality that you don't currently have, call RequestTools with a description
            of what you need. After new tools are loaded, use them to answer the user's question.
            """,
        tools: [requestToolsFunction]);
// Run a conversation that triggers dynamic tool expansion.
Console.WriteLine("=== Dynamic Function Tools Sample ===\n");

// Each prompt maps onto a catalog keyword (weather, time, temperature) the agent
// does not have tools for when the run starts.
string[] prompts =
{
    "What's the weather like in Seattle and London?",
    "What time is it in New York?",
    "Can you convert those temperatures to Celsius?"
};
// --- Non-Streaming Mode ---
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("=== Non-Streaming Mode ===");
Console.ResetColor();
Console.WriteLine();

AgentSession session = await agent.CreateSessionAsync();
foreach (var prompt in prompts)
{
    Console.ForegroundColor = ConsoleColor.Green;
    Console.Write("[User] ");
    Console.ResetColor();
    Console.WriteLine(prompt);

    var response = await agent.RunAsync(prompt, session);

    // Walk every returned message and render tool calls, tool results, and text.
    foreach (var message in response.Messages)
    {
        foreach (var item in message.Contents)
        {
            if (item is FunctionCallContent call)
            {
                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.WriteLine($" [Tool Call] {call.Name}({string.Join(", ", call.Arguments?.Select(a => $"{a.Key}: {a.Value}") ?? [])})");
                Console.ResetColor();
            }
            else if (item is FunctionResultContent result)
            {
                Console.ForegroundColor = ConsoleColor.DarkYellow;
                Console.WriteLine($" [Tool Result] {result.CallId} => {result.Result}");
                Console.ResetColor();
            }
            else if (item is TextContent text && !string.IsNullOrWhiteSpace(text.Text))
            {
                Console.ForegroundColor = ConsoleColor.Cyan;
                Console.Write("[Agent] ");
                Console.ResetColor();
                Console.WriteLine(text.Text);
            }
        }
    }

    Console.WriteLine();
}
// --- Streaming Mode ---
Console.ForegroundColor = ConsoleColor.Yellow;
Console.WriteLine("=== Streaming Mode ===");
Console.ResetColor();
Console.WriteLine();

AgentSession streamingSession = await agent.CreateSessionAsync();
foreach (var prompt in prompts)
{
    Console.ForegroundColor = ConsoleColor.Green;
    Console.Write("[User] ");
    Console.ResetColor();
    Console.WriteLine(prompt);

    // Tracks whether an "[Agent] ..." text line is currently open so a tool call
    // can terminate it with a newline before printing its own line.
    var agentTextOpen = false;

    await foreach (var update in agent.RunStreamingAsync(prompt, streamingSession))
    {
        foreach (var item in update.Contents)
        {
            if (item is FunctionCallContent call)
            {
                if (agentTextOpen)
                {
                    Console.WriteLine();
                    agentTextOpen = false;
                }

                Console.ForegroundColor = ConsoleColor.Yellow;
                Console.WriteLine($" [Tool Call] {call.Name}({string.Join(", ", call.Arguments?.Select(a => $"{a.Key}: {a.Value}") ?? [])})");
                Console.ResetColor();
            }
            else if (item is FunctionResultContent result)
            {
                Console.ForegroundColor = ConsoleColor.DarkYellow;
                Console.WriteLine($" [Tool Result] {result.CallId} => {result.Result}");
                Console.ResetColor();
            }
            else if (item is TextContent text && !string.IsNullOrWhiteSpace(text.Text))
            {
                // Print the "[Agent] " prefix only once per run of streamed text.
                if (!agentTextOpen)
                {
                    Console.ForegroundColor = ConsoleColor.Cyan;
                    Console.Write("[Agent] ");
                    Console.ResetColor();
                    agentTextOpen = true;
                }

                Console.Write(text.Text);
            }
        }
    }

    // Close any trailing agent text line before the blank separator line.
    if (agentTextOpen)
    {
        Console.WriteLine();
    }

    Console.WriteLine();
}
// Chat client middleware that logs the number and names of tools on each LLM request.
async Task<ChatResponse> ToolLoggingMiddleware(
    IEnumerable<ChatMessage> messages,
    ChatOptions? options,
    IChatClient innerChatClient,
    CancellationToken cancellationToken)
{
    // Log the tool inventory first, then delegate to the inner client unchanged.
    LogTools(options);
    var response = await innerChatClient.GetResponseAsync(messages, options, cancellationToken);
    return response;
}
// Streaming version of the tool logging middleware.
// Logs the current tool set once per LLM request, then forwards every streamed
// update from the inner client unchanged.
async IAsyncEnumerable<ChatResponseUpdate> ToolLoggingStreamingMiddleware(
    IEnumerable<ChatMessage> messages,
    ChatOptions? options,
    IChatClient innerChatClient,
    // EnumeratorCancellation lets a token passed to GetAsyncEnumerator flow into this iterator.
    [System.Runtime.CompilerServices.EnumeratorCancellation] CancellationToken cancellationToken)
{
    LogTools(options);
    // Pure passthrough: no updates are filtered or transformed.
    await foreach (var update in innerChatClient.GetStreamingResponseAsync(messages, options, cancellationToken))
    {
        yield return update;
    }
}
// Shared helper to log the current tool set.
// Emits one dark-gray line per LLM call listing the tool count and, when any
// tools are present, the names of the AIFunction tools.
void LogTools(ChatOptions? options)
{
    var tools = options?.Tools;

    // Build the message first so the color set/reset happens in exactly one place.
    var message = tools is { Count: > 0 }
        ? $" [Middleware] LLM call with {tools.Count} tool(s): {string.Join(", ", tools.OfType<AIFunction>().Select(t => t.Name))}"
        : " [Middleware] LLM call with 0 tools";

    Console.ForegroundColor = ConsoleColor.DarkGray;
    Console.WriteLine(message);
    Console.ResetColor();
}