-
Notifications
You must be signed in to change notification settings - Fork 310
Expand file tree
/
Copy pathProgram.cs
More file actions
180 lines (140 loc) · 5.13 KB
/
Program.cs
File metadata and controls
180 lines (140 loc) · 5.13 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
// <complete_code>
using Microsoft.AI.Foundry.Local;
using OpenAI;
using OpenAI.Chat;
using System.ClientModel;
using System.Text.Json;
// Configure the Foundry Local runtime: app identity, log verbosity, and the
// address the bundled OpenAI-compatible web service will listen on.
var config = new Configuration
{
    AppName = "foundry_local_samples",
    LogLevel = Microsoft.AI.Foundry.Local.LogLevel.Information,
    Web = new Configuration.WebService
    {
        // Loopback-only endpoint; the OpenAI client created below targets this URL.
        Urls = "http://127.0.0.1:52495"
    }
};
// Initialize the singleton instance.
await FoundryLocalManager.CreateAsync(config, Utils.GetAppLogger());
var mgr = FoundryLocalManager.Instance;
// Download and register all execution providers.
await Utils.RunWithSpinner("Registering execution providers", mgr.DownloadAndRegisterEpsAsync());
// Get the model catalog
var catalog = await mgr.GetCatalogAsync();
// Get a model using an alias
var model = await catalog.GetModelAsync("qwen2.5-0.5b") ?? throw new Exception("Model not found");
// Download the model (the method skips download if already cached)
await model.DownloadAsync(progress =>
{
    // `progress` is a percentage; \r overwrites the same console line in place.
    Console.Write($"\rDownloading model: {progress:F2}%");
    if (progress >= 100f)
    {
        Console.WriteLine();
    }
});
// Load the model into memory so the web service can serve it.
Console.Write($"Loading model {model.Id}...");
await model.LoadAsync();
Console.WriteLine("done.");
// Start the web service
Console.Write($"Starting web service on {config.Web.Urls}...");
await mgr.StartWebServiceAsync();
Console.WriteLine("done.");
// <<<<<< OPEN AI SDK USAGE >>>>>>
// Point the standard OpenAI SDK at the local Foundry web service. The local
// endpoint does not authenticate, but the SDK requires some credential value.
OpenAIClient client = new OpenAIClient(
    new ApiKeyCredential("notneeded"),
    new OpenAIClientOptions
    {
        Endpoint = new Uri(config.Web.Urls + "/v1"),
    });
// Chat client bound to the locally loaded model.
var chatClient = client.GetChatClient(model.Id);
// Conversation so far: a system prompt plus the user's question.
var messages = new List<ChatMessage>
{
    ChatMessage.CreateSystemMessage("You are a helpful AI assistant. If necessary, you can use any provided tools to answer the question."),
    ChatMessage.CreateUserMessage("What is the answer to 7 multiplied by 6?")
};
// Declare the single function tool the model may call; the JSON schema
// describes the two numeric operands the model must supply.
var multiplyTool = ChatTool.CreateFunctionTool(
    functionName: "multiply_numbers",
    functionDescription: "A tool for multiplying two numbers.",
    functionParameters: BinaryData.FromString("""
    {
        "type": "object",
        "properties": {
            "first": { "type": "number", "description": "The first number in the operation" },
            "second": { "type": "number", "description": "The second number in the operation" }
        },
        "required": ["first", "second"]
    }
    """));
// Prepare chat completion options for the first turn.
var options = new ChatCompletionOptions
{
    ToolChoice = ChatToolChoice.CreateRequiredChoice() // Force the model to make a tool call
};
options.Tools.Add(multiplyTool);
// Get a streaming chat completion response.
// This turn forces a tool call (ToolChoice = required), so alongside any
// streamed text we collect the tool-call fragments for invocation below.
var completionUpdates = chatClient.CompleteChatStreaming(messages, options);
var toolCalls = new List<StreamingChatToolCallUpdate>();
Console.Write($"[ASSISTANT]: ");
foreach (var completionUpdate in completionUpdates)
{
    // Fix: print every streamed content part, not only the first — a single
    // update may carry more than one content fragment, and the original
    // `ContentUpdate[0]` silently dropped the rest.
    foreach (var contentPart in completionUpdate.ContentUpdate)
    {
        Console.Write(contentPart.Text);
    }
    if (completionUpdate.FinishReason == ChatFinishReason.ToolCalls)
    {
        // NOTE(review): this only captures tool-call fragments arriving in the
        // same update as the ToolCalls finish reason; fragments streamed in
        // earlier updates would be missed — confirm against the local service.
        foreach (var toolCall in completionUpdate.ToolCallUpdates)
        {
            toolCalls.Add(toolCall);
        }
    }
}
Console.WriteLine();
// Invoke tools called and append responses to the chat.
foreach (var toolCall in toolCalls)
{
    if (toolCall.FunctionName == "multiply_numbers")
    {
        // Parse the model-supplied JSON arguments ({"first": n, "second": n}).
        // GetInt32 throws if the model returns a non-integer "number" —
        // acceptable for this sample's fixed prompt.
        var arguments = JsonDocument.Parse(toolCall.FunctionArgumentsUpdate.ToString()).RootElement;
        var first = arguments.GetProperty("first").GetInt32();
        var second = arguments.GetProperty("second").GetInt32();
        Console.WriteLine($"\nInvoking tool: {toolCall.FunctionName} with arguments {first} and {second}");
        var result = Utils.MultiplyNumbers(first, second);
        Console.WriteLine($"Tool response: {result}");
        // Fix: echo the model's actual tool-call id back in the tool message
        // (previously a hard-coded "abcd1234"), so the response is correlated
        // with the specific call that produced it.
        messages.Add(ChatMessage.CreateToolMessage(toolCallId: toolCall.ToolCallId, content: result.ToString()));
    }
}
Console.WriteLine("\nTool calls completed. Prompting model to continue conversation...\n");
// Prompt the model to continue the conversation after the tool call
messages.Add(ChatMessage.CreateSystemMessage("Respond only with the answer generated by the tool."));
// Set tool calling back to auto so that the model can decide whether to call
// the tool again or continue the conversation based on the new user prompt
options.ToolChoice = ChatToolChoice.CreateAutoChoice();
// Run the next turn of the conversation
Console.WriteLine("Chat completion response:");
completionUpdates = chatClient.CompleteChatStreaming(messages, options);
Console.Write($"[ASSISTANT]: ");
foreach (var completionUpdate in completionUpdates)
{
    // Fix: print every streamed content part, not only the first — the
    // original `ContentUpdate[0]` dropped any additional fragments.
    foreach (var contentPart in completionUpdate.ContentUpdate)
    {
        Console.Write(contentPart.Text);
    }
}
Console.WriteLine();
// <<<<<< END OPEN AI SDK USAGE >>>>>>
// Tidy up
// Stop the OpenAI-compatible web service first, then release the model from
// memory so the runtime shuts down cleanly.
await mgr.StopWebServiceAsync();
await model.UnloadAsync();
// </complete_code>