Skip to content

Commit 4ec1654

Browse files
committed
Expand usage scenarios in examples docs
1 parent cdcd81f commit 4ec1654

File tree

3 files changed

+259
-0
lines changed

3 files changed

+259
-0
lines changed

docs/en/examples.md

Lines changed: 119 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -152,6 +152,125 @@ int main() {
152152
}
153153
```
154154

155+
## Exception Mode
156+
157+
The library currently reports failures by throwing exceptions. This is the recommended direct style when you want errors to propagate naturally.
158+
159+
```cpp
160+
import mcpplibs.llmapi;
161+
import std;
162+
163+
int main() {
164+
using namespace mcpplibs::llmapi;
165+
166+
try {
167+
auto client = Client(Config{
168+
.apiKey = std::getenv("OPENAI_API_KEY"),
169+
.model = "gpt-4o-mini",
170+
});
171+
172+
auto resp = client.chat("Explain RAII in one paragraph.");
173+
std::cout << resp.text() << '\n';
174+
} catch (const ApiError& e) {
175+
std::cerr << "API error: status=" << e.statusCode << " body=" << e.body << '\n';
176+
return 2;
177+
} catch (const ConnectionError& e) {
178+
std::cerr << "Connection error: " << e.what() << '\n';
179+
return 3;
180+
} catch (const std::exception& e) {
181+
std::cerr << "Unexpected error: " << e.what() << '\n';
182+
return 4;
183+
}
184+
}
185+
```
186+
187+
## No-Exception Style At Call Site
188+
189+
If your application prefers not to let exceptions escape, wrap the call and convert the result to `std::optional`, `std::expected`, or your own result type.
190+
191+
```cpp
192+
import mcpplibs.llmapi;
193+
import std;
194+
195+
std::optional<std::string> safe_chat(std::string_view prompt) {
196+
using namespace mcpplibs::llmapi;
197+
198+
try {
199+
auto client = Client(Config{
200+
.apiKey = std::getenv("OPENAI_API_KEY"),
201+
.model = "gpt-4o-mini",
202+
});
203+
return client.chat(prompt).text();
204+
} catch (...) {
205+
return std::nullopt;
206+
}
207+
}
208+
```
209+
210+
## Recommended Retry At The Application Layer
211+
212+
Retry policy is currently best implemented by the library user because retryability depends on business semantics.
213+
214+
```cpp
215+
import mcpplibs.llmapi;
216+
import std;
217+
218+
std::string chat_with_retry(std::string_view prompt) {
219+
using namespace mcpplibs::llmapi;
220+
221+
for (int attempt = 0; attempt < 3; ++attempt) {
222+
try {
223+
auto client = Client(Config{
224+
.apiKey = std::getenv("OPENAI_API_KEY"),
225+
.model = "gpt-4o-mini",
226+
});
227+
return client.chat(prompt).text();
228+
} catch (const ConnectionError&) {
229+
} catch (const ApiError& e) {
230+
if (e.statusCode != 429 && (e.statusCode < 500 || e.statusCode >= 600)) {
231+
throw;
232+
}
233+
}
234+
235+
std::this_thread::sleep_for(std::chrono::milliseconds(200 * (1 << attempt)));
236+
}
237+
238+
throw std::runtime_error("retry limit exceeded");
239+
}
240+
```
241+
242+
## Parallel Use With Isolated Clients
243+
244+
The recommended concurrency model is one client per task or thread.
245+
246+
```cpp
247+
import mcpplibs.llmapi;
248+
import std;
249+
250+
int main() {
251+
using namespace mcpplibs::llmapi;
252+
253+
auto futureA = std::async(std::launch::async, [] {
254+
auto client = Client(Config{
255+
.apiKey = std::getenv("OPENAI_API_KEY"),
256+
.model = "gpt-4o-mini",
257+
});
258+
return client.chat("Summarize modules.").text();
259+
});
260+
261+
auto futureB = std::async(std::launch::async, [] {
262+
auto client = Client(AnthropicConfig{
263+
.apiKey = std::getenv("ANTHROPIC_API_KEY"),
264+
.model = "claude-sonnet-4-20250514",
265+
});
266+
return client.chat("Translate 'hello world' to Japanese.").text();
267+
});
268+
269+
std::cout << futureA.get() << '\n';
270+
std::cout << futureB.get() << '\n';
271+
}
272+
```
273+
155274
## See Also
156275

157276
- [C++ API Reference](cpp-api.md)

docs/zh-hant/examples.md

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,3 +66,73 @@ auto embedding = client.embed(
6666
"text-embedding-3-small"
6767
);
6868
```
69+
70+
## 例外模式
71+
72+
目前函式庫以拋出例外作為失敗回報方式。對於希望讓錯誤自然向上傳遞的呼叫鏈,這是最直接的使用方式。
73+
74+
```cpp
75+
try {
76+
auto resp = client.chat("請用一段話解釋 RAII。");
77+
std::cout << resp.text() << '\n';
78+
} catch (const ApiError& e) {
79+
std::cerr << "API error: status=" << e.statusCode << " body=" << e.body << '\n';
80+
} catch (const ConnectionError& e) {
81+
std::cerr << "Connection error: " << e.what() << '\n';
82+
}
83+
```
84+
85+
## 無例外模式
86+
87+
如果上層不希望讓例外繼續外拋,可以在呼叫點自行包一層,轉成 `optional`、`expected` 或業務自己的結果型別。
88+
89+
```cpp
90+
std::optional<std::string> safe_chat(std::string_view prompt) {
91+
try {
92+
return client.chat(prompt).text();
93+
} catch (...) {
94+
return std::nullopt;
95+
}
96+
}
97+
```
98+
99+
## 建議的上層重試
100+
101+
目前更建議由函式庫使用者在上層實作重試,因為是否可重試取決於業務語義。
102+
103+
```cpp
104+
for (int attempt = 0; attempt < 3; ++attempt) {
105+
try {
106+
return client.chat(prompt).text();
107+
} catch (const ConnectionError&) {
108+
} catch (const ApiError& e) {
109+
if (e.statusCode != 429 && (e.statusCode < 500 || e.statusCode >= 600)) {
110+
throw;
111+
}
112+
}
113+
114+
std::this_thread::sleep_for(std::chrono::milliseconds(200 * (1 << attempt)));
115+
}
116+
```
117+
118+
## 並發使用
119+
120+
建議模式是「實例隔離、上層並發」,也就是每個任務 / 執行緒各自建立一個 `Client`。
121+
122+
```cpp
123+
auto futureA = std::async(std::launch::async, [] {
124+
auto client = Client(Config{
125+
.apiKey = std::getenv("OPENAI_API_KEY"),
126+
.model = "gpt-4o-mini",
127+
});
128+
return client.chat("總結一下 modules。").text();
129+
});
130+
131+
auto futureB = std::async(std::launch::async, [] {
132+
auto client = Client(AnthropicConfig{
133+
.apiKey = std::getenv("ANTHROPIC_API_KEY"),
134+
.model = "claude-sonnet-4-20250514",
135+
});
136+
return client.chat("把 hello world 翻譯成日語。").text();
137+
});
138+
```

docs/zh/examples.md

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -66,3 +66,73 @@ auto embedding = client.embed(
6666
"text-embedding-3-small"
6767
);
6868
```
69+
70+
## 异常模式
71+
72+
当前库以抛异常作为失败报告方式。对于希望让错误直接上传的调用链,这是最直接的使用方式。
73+
74+
```cpp
75+
try {
76+
auto resp = client.chat("请用一段话解释 RAII。");
77+
std::cout << resp.text() << '\n';
78+
} catch (const ApiError& e) {
79+
std::cerr << "API error: status=" << e.statusCode << " body=" << e.body << '\n';
80+
} catch (const ConnectionError& e) {
81+
std::cerr << "Connection error: " << e.what() << '\n';
82+
}
83+
```
84+
85+
## 无异常模式
86+
87+
如果你的上层不希望异常继续外抛,可以在调用点自行包一层,转换成 `optional`、`expected` 或业务自己的结果类型。
88+
89+
```cpp
90+
std::optional<std::string> safe_chat(std::string_view prompt) {
91+
try {
92+
return client.chat(prompt).text();
93+
} catch (...) {
94+
return std::nullopt;
95+
}
96+
}
97+
```
98+
99+
## 推荐的上层重试
100+
101+
当前更推荐由库使用者在上层实现重试,因为是否可重试取决于业务语义。
102+
103+
```cpp
104+
for (int attempt = 0; attempt < 3; ++attempt) {
105+
try {
106+
return client.chat(prompt).text();
107+
} catch (const ConnectionError&) {
108+
} catch (const ApiError& e) {
109+
if (e.statusCode != 429 && (e.statusCode < 500 || e.statusCode >= 600)) {
110+
throw;
111+
}
112+
}
113+
114+
std::this_thread::sleep_for(std::chrono::milliseconds(200 * (1 << attempt)));
115+
}
116+
```
117+
118+
## 并发使用
119+
120+
推荐模式是“实例隔离,上层并发”,也就是每个任务 / 线程各自创建一个 `Client`。
121+
122+
```cpp
123+
auto futureA = std::async(std::launch::async, [] {
124+
auto client = Client(Config{
125+
.apiKey = std::getenv("OPENAI_API_KEY"),
126+
.model = "gpt-4o-mini",
127+
});
128+
return client.chat("总结一下 modules。").text();
129+
});
130+
131+
auto futureB = std::async(std::launch::async, [] {
132+
auto client = Client(AnthropicConfig{
133+
.apiKey = std::getenv("ANTHROPIC_API_KEY"),
134+
.model = "claude-sonnet-4-20250514",
135+
});
136+
return client.chat("把 hello world 翻译成日语。").text();
137+
});
138+
```

0 commit comments

Comments
 (0)