 // - starting/stopping the local web service
 //
 // Responses API calls go through the official OpenAI .NET package's `ResponsesClient`
-// pointed at the local web service, mirroring how `foundry-local-web-server` uses
-// `OpenAIClient.GetChatClient(...)`.
+// pointed at the local web service, mirroring how `samples/cs/foundry-local-web-server`
+// uses `OpenAIClient.GetChatClient(...)` for chat completions.
 
 using System.ClientModel;
-using System.Text;
-using System.Text.Json;
 
 using Microsoft.AI.Foundry.Local;
 
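For context, the chat-completions pattern that the header comment says this sample mirrors looks roughly like the sketch below. It is a sketch only: the endpoint URL and model id are illustrative placeholders, not values from this commit, and the types come from the official OpenAI .NET package.

using System.ClientModel;
using OpenAI;
using OpenAI.Chat;

// The local Foundry service ignores the API key, but the client requires one.
ApiKeyCredential key = new("notneeded");
OpenAIClient openai = new(key, new OpenAIClientOptions
{
    Endpoint = new Uri("http://localhost:5273/v1"), // placeholder local endpoint
});

// GetChatClient(...) is the chat-completions analogue of GetResponsesClient().
ChatClient chat = openai.GetChatClient("model-id"); // placeholder model id
ChatCompletion completion = await chat.CompleteChatAsync("Hello!");
Console.WriteLine(completion.Content[0].Text);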
@@ -73,108 +71,104 @@ await model.DownloadAsync(progress =>
 await mgr.StartWebServiceAsync();
 Console.WriteLine("done.");
 
-// <<<<<< OPEN AI RESPONSES SDK USAGE >>>>>>
-// Use the OpenAI Responses client to call the local Foundry web service.
-ApiKeyCredential key = new ApiKeyCredential("notneeded");
-OpenAIClient openai = new OpenAIClient(key, new OpenAIClientOptions
+try
 {
-    Endpoint = new Uri(config.Web.Urls + "/v1"),
-});
-ResponsesClient responses = openai.GetResponsesClient();
-
-// 1) Non-streaming
-Console.WriteLine("\n=== Non-streaming ===");
-ResponseResult simple = await responses.CreateResponseAsync(model.Id, "What is 2 + 2? Respond with just the number.");
-Console.WriteLine($"[ASSISTANT]: {simple.GetOutputText()}");
-
-// 2) Streaming
-Console.WriteLine("\n=== Streaming ===");
-Console.Write("[ASSISTANT]: ");
-await foreach (StreamingResponseUpdate update in responses.CreateResponseStreamingAsync(model.Id, "Count from 1 to 3."))
-{
-    if (update is StreamingResponseOutputTextDeltaUpdate delta && !string.IsNullOrEmpty(delta.Delta))
+    // <<<<<< OPEN AI RESPONSES SDK USAGE >>>>>>
+    // Use the OpenAI Responses client to call the local Foundry web service.
+    ApiKeyCredential key = new("notneeded");
+    OpenAIClient openai = new(key, new OpenAIClientOptions
     {
-        Console.Write(delta.Delta);
-    }
-}
-Console.WriteLine();
-
-// 3) Function/tool calling — full round-trip using previous_response_id.
-Console.WriteLine("\n=== Function calling ===");
-var weatherSchema = BinaryData.FromString("""
+        Endpoint = new Uri(config.Web.Urls + "/v1"),
+    });
+    ResponsesClient responses = openai.GetResponsesClient();
+
+    // 1) Non-streaming
+    Console.WriteLine("\n=== Non-streaming ===");
+    ResponseResult simple = await responses.CreateResponseAsync(model.Id, "Reply with one short sentence about local AI.");
+    Console.WriteLine($"[ASSISTANT]: {simple.GetOutputText()}");
+
+    // 2) Streaming
+    Console.WriteLine("\n=== Streaming ===");
+    Console.Write("[ASSISTANT]: ");
+    await foreach (StreamingResponseUpdate update in responses.CreateResponseStreamingAsync(model.Id, "Count from 1 to 3."))
     {
-    "type": "object",
-    "properties": {
-        "city": { "type": "string", "description": "The city to look up" }
-    },
-    "required": ["city"]
+        if (update is StreamingResponseOutputTextDeltaUpdate delta && !string.IsNullOrEmpty(delta.Delta))
+        {
+            Console.Write(delta.Delta);
+        }
     }
-    """);
+    Console.WriteLine();
+
+    // 3) Function/tool calling — full round-trip via previous_response_id.
+    // The function takes no arguments, which matches the pattern small models handle reliably.
+    Console.WriteLine("\n=== Function calling ===");
+    var emptyParamsSchema = BinaryData.FromString("""
+    {
+        "type": "object",
+        "properties": {},
+        "additionalProperties": false
+    }
+    """);
+
+    ResponseTool getWeatherTool = ResponseTool.CreateFunctionTool(
+        functionName: "get_weather",
+        functionParameters: emptyParamsSchema,
+        strictModeEnabled: true,
+        functionDescription: "Get the current weather. This sample always returns Seattle weather.");
 
-var toolOptions = new CreateResponseOptions(
-    model.Id,
-    new[] { ResponseItem.CreateUserMessageItem("Use get_weather to look up the weather in Seattle, then summarize it.") })
-{
-    StoredOutputEnabled = true,
-    ToolChoice = ResponseToolChoice.CreateRequiredChoice(),
-};
-toolOptions.Tools.Add(ResponseTool.CreateFunctionTool(
-    functionName: "get_weather",
-    functionParameters: weatherSchema,
-    strictModeEnabled: true,
-    functionDescription: "Get the current weather for a given city."));
+    var toolCallOptions = new CreateResponseOptions(
+        model.Id,
+        new[] { ResponseItem.CreateUserMessageItem("Use the get_weather tool and then answer with the weather.") })
+    {
+        StoredOutputEnabled = true,
+        ToolChoice = ResponseToolChoice.CreateRequiredChoice(),
+        MaxOutputTokenCount = 64,
+        Temperature = 0.0f,
+    };
+    toolCallOptions.Tools.Add(getWeatherTool);
 
-ResponseResult toolCallResponse = await responses.CreateResponseAsync(toolOptions);
+    ResponseResult toolResponse = await responses.CreateResponseAsync(toolCallOptions);
 
-// Find the function-call output item the model produced.
-FunctionCallResponseItem? functionCall = null;
-foreach (var item in toolCallResponse.OutputItems)
-{
-    if (item is FunctionCallResponseItem fc && fc.FunctionName == "get_weather")
+    FunctionCallResponseItem? functionCall = null;
+    foreach (var item in toolResponse.OutputItems)
     {
-        functionCall = fc;
-        break;
+        if (item is FunctionCallResponseItem fc && fc.FunctionName == "get_weather")
+        {
+            functionCall = fc;
+            break;
+        }
     }
-}
 
-if (functionCall is null)
-{
-    Console.WriteLine("Model did not produce a function call; skipping tool round-trip.");
-}
-else
-{
-    var argsJson = functionCall.FunctionArguments?.ToString() ?? "{}";
-    var city = "unknown";
-    try
+    if (functionCall is null)
     {
-        city = JsonDocument.Parse(argsJson).RootElement.GetProperty("city").GetString() ?? "unknown";
+        Console.WriteLine("Model did not produce a function call; skipping tool round-trip.");
     }
-    catch (KeyNotFoundException) { /* model gave us no city */ }
-
-    Console.WriteLine($"Tool call: get_weather(city=\"{city}\")");
-    var toolOutput = $$$"""{"city": "{{{city}}}", "temperatureF": 68, "summary": "partly cloudy"}""";
-    Console.WriteLine($"Tool output: {toolOutput}");
-
-    // Submit the tool's output and ask the model to continue using `previous_response_id`.
-    var followUpOptions = new CreateResponseOptions(
-        model.Id,
-        new[] { ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, toolOutput) })
+    else
     {
-        PreviousResponseId = toolCallResponse.Id,
-        StoredOutputEnabled = true,
-    };
-    followUpOptions.Tools.Add(ResponseTool.CreateFunctionTool(
-        functionName: "get_weather",
-        functionParameters: weatherSchema,
-        strictModeEnabled: true,
-        functionDescription: "Get the current weather for a given city."));
-
-    ResponseResult finalResponse = await responses.CreateResponseAsync(followUpOptions);
-    Console.WriteLine($"[ASSISTANT]: {finalResponse.GetOutputText()}");
+        Console.WriteLine($"[TOOL CALL]: {functionCall.FunctionName}({functionCall.FunctionArguments})");
+
+        const string toolOutput = """{"location": "Seattle", "weather": "72 degrees F and sunny"}""";
+
+        var followUpOptions = new CreateResponseOptions(
+            model.Id,
+            new[] { ResponseItem.CreateFunctionCallOutputItem(functionCall.CallId, toolOutput) })
+        {
+            PreviousResponseId = toolResponse.Id,
+            StoredOutputEnabled = true,
+            MaxOutputTokenCount = 64,
+            Temperature = 0.0f,
+        };
+        followUpOptions.Tools.Add(getWeatherTool);
+
+        ResponseResult finalResponse = await responses.CreateResponseAsync(followUpOptions);
+        Console.WriteLine($"[ASSISTANT FINAL]: {finalResponse.GetOutputText()}");
+    }
+    // <<<<<< END OPEN AI RESPONSES SDK USAGE >>>>>>
+}
+finally
+{
+    // Tidy up
+    await mgr.StopWebServiceAsync();
+    await model.UnloadAsync();
 }
-// <<<<<< END OPEN AI RESPONSES SDK USAGE >>>>>>
-
-// Tidy up
-await mgr.StopWebServiceAsync();
-await model.UnloadAsync();
 // </complete_code>
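The ResponsesClient calls in the diff reduce to plain HTTP POSTs against the local service's OpenAI-compatible /v1/responses endpoint, which is useful to know when probing the service without the SDK. A minimal sketch, assuming the public Responses API request shape; the URL and model id here are placeholders, not values from this commit:

using System.Net.Http;
using System.Text;

using HttpClient http = new();

// Simplest non-streaming request: model + input, the same shape the SDK sends.
const string body = """
{ "model": "model-id", "input": "Reply with one short sentence about local AI." }
""";

using HttpResponseMessage resp = await http.PostAsync(
    "http://localhost:5273/v1/responses", // placeholder local endpoint
    new StringContent(body, Encoding.UTF8, "application/json"));
Console.WriteLine(await resp.Content.ReadAsStringAsync()); // raw Responses API JSON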
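The function-calling round-trip in the diff works the same way on the wire: the follow-up request carries the tool's result as a function_call_output input item together with the id of the stored first response. Schematically, again as an assumption-laden sketch (field names follow the public Responses API; the ids are placeholders):

// Follow-up request body after running get_weather locally.
// The sample also re-sends the tool definition under "tools" (omitted here).
const string followUpBody = """
{
  "model": "model-id",
  "previous_response_id": "resp_123",
  "input": [
    {
      "type": "function_call_output",
      "call_id": "call_456",
      "output": "{\"location\": \"Seattle\", \"weather\": \"72 degrees F and sunny\"}"
    }
  ]
}
""";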