Skip to content

Commit 94fad17

Browse files
committed
Add default OpenAI Client config shortcut
1 parent 34b4bd1 commit 94fad17

File tree

13 files changed

+61
-44
lines changed

13 files changed

+61
-44
lines changed

README.md

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
|:---:|
1212
| [Documentation](docs/) - [C++ API](docs/cpp-api.md) - [Examples](docs/examples.md) |
1313

14-
`llmapi` provides a typed `Client<Provider>` API for chat, streaming, embeddings, tool calls, and conversation persistence. The repository ships built-in providers for OpenAI and Anthropic, and the OpenAI provider can target compatible endpoints through a custom `baseUrl`.
14+
`llmapi` provides a typed `Client<Provider>` API for chat, streaming, embeddings, tool calls, and conversation persistence. The default config alias `Config` maps to OpenAI-style providers, so the common case does not need an explicit `openai::OpenAI` wrapper.
1515

1616
## Features
1717

@@ -37,10 +37,10 @@ int main() {
3737
return 1;
3838
}
3939

40-
auto client = Client(openai::OpenAI({
40+
auto client = Client(Config{
4141
.apiKey = apiKey,
4242
.model = "gpt-4o-mini",
43-
}));
43+
});
4444

4545
client.system("You are a concise assistant.");
4646
auto resp = client.chat("Explain why C++23 modules are useful in two sentences.");
@@ -54,6 +54,7 @@ int main() {
5454

5555
- `openai::OpenAI` for OpenAI chat, streaming, embeddings, and OpenAI-compatible endpoints
5656
- `anthropic::Anthropic` for Anthropic chat and streaming
57+
- `Config` as a convenient alias for `openai::Config`
5758

5859
Compatible endpoints can reuse the OpenAI provider:
5960

docs/advanced.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -7,19 +7,19 @@ Advanced patterns using the current API surface.
77
Each `chat()` and `chat_stream()` call appends the user message and the assistant response to the in-memory `Conversation`.
88

99
```cpp
10-
auto client = Client(openai::OpenAI({
10+
auto client = Client(Config{
1111
.apiKey = std::getenv("OPENAI_API_KEY"),
1212
.model = "gpt-4o-mini",
13-
}));
13+
});
1414

1515
client.system("You are helpful.");
1616
client.chat("Remember that I prefer concise answers.");
1717
client.save_conversation("session.json");
1818

19-
auto restored = Client(openai::OpenAI({
19+
auto restored = Client(Config{
2020
.apiKey = std::getenv("OPENAI_API_KEY"),
2121
.model = "gpt-4o-mini",
22-
}));
22+
});
2323
restored.load_conversation("session.json");
2424
```
2525

docs/cpp-api.md

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -83,12 +83,14 @@ class Client;
8383
### Construction
8484

8585
```cpp
86-
auto client = Client(openai::OpenAI({
86+
auto client = Client(Config{
8787
.apiKey = std::getenv("OPENAI_API_KEY"),
8888
.model = "gpt-4o-mini",
89-
}));
89+
});
9090
```
9191

92+
`Config` is an exported alias for `openai::Config`, so `Client(Config{...})` constructs an `openai::OpenAI` provider without naming `openai::OpenAI` explicitly.
93+
9294
### Configuration
9395

9496
```cpp
@@ -236,10 +238,10 @@ import std;
236238
int main() {
237239
using namespace mcpplibs::llmapi;
238240
239-
auto client = Client(openai::OpenAI({
241+
auto client = Client(Config{
240242
.apiKey = std::getenv("OPENAI_API_KEY"),
241243
.model = "gpt-4o-mini",
242-
}));
244+
});
243245
244246
client.default_params(ChatParams{
245247
.temperature = 0.2,

docs/examples.md

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -11,10 +11,10 @@ import std;
1111
int main() {
1212
using namespace mcpplibs::llmapi;
1313

14-
auto client = Client(openai::OpenAI({
14+
auto client = Client(Config{
1515
.apiKey = std::getenv("OPENAI_API_KEY"),
1616
.model = "gpt-4o-mini",
17-
}));
17+
});
1818

1919
client.system("You are a helpful assistant.");
2020
auto resp = client.chat("In one sentence, explain C++23 modules.");
@@ -31,10 +31,10 @@ import std;
3131
int main() {
3232
using namespace mcpplibs::llmapi;
3333

34-
auto client = Client(openai::OpenAI({
34+
auto client = Client(Config{
3535
.apiKey = std::getenv("OPENAI_API_KEY"),
3636
.model = "gpt-4o-mini",
37-
}));
37+
});
3838

3939
std::string streamed;
4040
client.chat_stream("Write a 3-line poem about templates.", [&](std::string_view chunk) {
@@ -54,10 +54,10 @@ import std;
5454
int main() {
5555
using namespace mcpplibs::llmapi;
5656

57-
auto client = Client(openai::OpenAI({
57+
auto client = Client(Config{
5858
.apiKey = std::getenv("OPENAI_API_KEY"),
5959
.model = "gpt-4o-mini",
60-
}));
60+
});
6161

6262
client.system("Reply briefly.");
6363

@@ -79,18 +79,18 @@ import std;
7979
int main() {
8080
using namespace mcpplibs::llmapi;
8181

82-
auto client = Client(openai::OpenAI({
82+
auto client = Client(Config{
8383
.apiKey = std::getenv("OPENAI_API_KEY"),
8484
.model = "gpt-4o-mini",
85-
}));
85+
});
8686

8787
client.chat("Remember that my favorite language is C++.");
8888
client.save_conversation("conversation.json");
8989

90-
auto restored = Client(openai::OpenAI({
90+
auto restored = Client(Config{
9191
.apiKey = std::getenv("OPENAI_API_KEY"),
9292
.model = "gpt-4o-mini",
93-
}));
93+
});
9494
restored.load_conversation("conversation.json");
9595

9696
auto resp = restored.chat("What language do I like?");
@@ -107,10 +107,10 @@ import std;
107107
int main() {
108108
using namespace mcpplibs::llmapi;
109109

110-
auto client = Client(openai::OpenAI({
110+
auto client = Client(Config{
111111
.apiKey = std::getenv("OPENAI_API_KEY"),
112112
.model = "gpt-4o-mini",
113-
}));
113+
});
114114

115115
auto params = ChatParams{
116116
.tools = std::vector<ToolDef>{{
@@ -137,10 +137,10 @@ import std;
137137
int main() {
138138
using namespace mcpplibs::llmapi;
139139

140-
auto client = Client(openai::OpenAI({
140+
auto client = Client(Config{
141141
.apiKey = std::getenv("OPENAI_API_KEY"),
142142
.model = "gpt-4o-mini",
143-
}));
143+
});
144144

145145
auto embedding = client.embed(
146146
{"hello world", "modern c++"},

docs/getting-started.md

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -53,10 +53,10 @@ int main() {
5353
return 1;
5454
}
5555

56-
auto client = Client(openai::OpenAI({
56+
auto client = Client(Config{
5757
.apiKey = apiKey,
5858
.model = "gpt-4o-mini",
59-
}));
59+
});
6060

6161
client.system("You are a helpful assistant.");
6262
auto resp = client.chat("Hello, introduce yourself in one sentence.");
@@ -88,10 +88,10 @@ export DEEPSEEK_API_KEY="..."
8888
OpenAI:
8989

9090
```cpp
91-
auto client = Client(openai::OpenAI({
91+
auto client = Client(Config{
9292
.apiKey = std::getenv("OPENAI_API_KEY"),
9393
.model = "gpt-4o-mini",
94-
}));
94+
});
9595
```
9696

9797
Anthropic:
@@ -106,11 +106,11 @@ auto client = Client(anthropic::Anthropic({
106106
Compatible endpoint through the OpenAI provider:
107107

108108
```cpp
109-
auto client = Client(openai::OpenAI({
109+
auto client = Client(Config{
110110
.apiKey = std::getenv("DEEPSEEK_API_KEY"),
111111
.baseUrl = std::string(URL::DeepSeek),
112112
.model = "deepseek-chat",
113-
}));
113+
});
114114
```
115115

116116
## Next Steps

docs/providers.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,10 +22,10 @@ URL::Poe
2222
Use `openai::OpenAI` — or the exported `Config` alias for `openai::Config` — for OpenAI chat, streaming, tool calls, and embeddings.
2323
2424
```cpp
25-
auto client = Client(openai::OpenAI({
25+
auto client = Client(Config{
2626
.apiKey = std::getenv("OPENAI_API_KEY"),
2727
.model = "gpt-4o-mini",
28-
}));
28+
});
2929
```
3030

3131
- Get keys from [OpenAI Platform](https://platform.openai.com/api-keys)

examples/basic.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,10 @@ int main() {
1313
return 1;
1414
}
1515

16-
auto client = Client(openai::OpenAI({
16+
auto client = Client(Config{
1717
.apiKey = apiKey,
1818
.model = "gpt-4o-mini",
19-
}));
19+
});
2020
client.system("You are a helpful assistant.");
2121

2222
println("=== llmapi Basic Usage Demo ===");

examples/chat.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,10 @@ int main() {
1313
return 1;
1414
}
1515

16-
auto client = Client(openai::OpenAI({
16+
auto client = Client(Config{
1717
.apiKey = apiKey,
1818
.model = "gpt-4o-mini",
19-
}));
19+
});
2020
client.system("You are a helpful assistant.");
2121

2222
println("AI Chat CLI - Type 'quit' to exit");

examples/hello_mcpp.cpp

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -13,10 +13,10 @@ int main() {
1313
return 1;
1414
}
1515

16-
auto client = Client(openai::OpenAI({
16+
auto client = Client(Config{
1717
.apiKey = apiKey,
1818
.model = "gpt-4o-mini",
19-
}));
19+
});
2020

2121
auto resp = client.chat("Hello! In one sentence, introduce modern C++.");
2222
println(resp.text());

src/client.cppm

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@ export module mcpplibs.llmapi:client;
33
import :types;
44
import :provider;
55
import :coro;
6+
import :openai;
7+
import :anthropic;
68
import std;
79

810
export namespace mcpplibs::llmapi {
@@ -16,6 +18,12 @@ private:
1618

1719
public:
1820
explicit Client(P provider) : provider_(std::move(provider)) {}
21+
explicit Client(openai::Config config)
22+
requires std::same_as<P, openai::OpenAI>
23+
: provider_(openai::OpenAI(std::move(config))) {}
24+
explicit Client(anthropic::Config config)
25+
requires std::same_as<P, anthropic::Anthropic>
26+
: provider_(anthropic::Anthropic(std::move(config))) {}
1927

2028
// Config (chainable)
2129
Client& default_params(ChatParams params) {
@@ -107,4 +115,7 @@ public:
107115
P& provider() { return provider_; }
108116
};
109117

118+
Client(openai::Config) -> Client<openai::OpenAI>;
119+
Client(anthropic::Config) -> Client<anthropic::Anthropic>;
120+
110121
} // namespace mcpplibs::llmapi

0 commit comments

Comments
 (0)