Skip to content

Commit 183d42d

Browse files
Sunrisepeak and claude committed
refactor: reorganize providers into directory, add tests and error types
- Move provider.cppm, openai.cppm, anthropic.cppm into src/providers/ - Add ApiError and ConnectionError in src/errors.cppm - Add live API tests (OpenAI, Anthropic), tool calling, structured output, embeddings tests - Add integration test verifying concepts and type system - Update examples to new Client<Provider> API - Clean up xmake.lua (global c++23/modules policy) Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 0568e65 commit 183d42d

16 files changed

+432
-76
lines changed

examples/basic.cpp

Lines changed: 20 additions & 37 deletions
Original file line numberDiff line numberDiff line change
@@ -1,68 +1,51 @@
11
// Basic usage example - demonstrates both streaming and non-streaming modes
2-
import std;
32
import mcpplibs.llmapi;
3+
import std;
44

5-
using namespace mcpplibs;
5+
using namespace mcpplibs::llmapi;
66

77
int main() {
8-
auto api_key = std::getenv("OPENAI_API_KEY");
9-
if (!api_key) {
8+
auto apiKey = std::getenv("OPENAI_API_KEY");
9+
if (!apiKey) {
1010
std::println("Error: OPENAI_API_KEY not set");
1111
return 1;
1212
}
1313

14-
llmapi::Client client(api_key, llmapi::URL::Poe);
15-
client.model("gpt-5").system("You are a helpful assistant.");
14+
auto client = Client(openai::OpenAI({
15+
.apiKey = apiKey,
16+
.model = "gpt-4o-mini",
17+
}));
18+
client.system("You are a helpful assistant.");
1619

1720
std::println("=== llmapi Basic Usage Demo ===\n");
1821

1922
try {
2023
// Example 1: Non-streaming request
2124
std::println("[Example 1] Non-streaming mode:");
2225
std::println("Question: What is the capital of China?\n");
23-
24-
client.user("What is the capital of China?");
25-
client.request();
26-
27-
std::println("Answer: {}\n", client.getAnswer());
26+
27+
auto resp = client.chat("What is the capital of China?");
28+
std::println("Answer: {}\n", resp.text());
2829

2930
// Example 2: Streaming request
3031
std::println("[Example 2] Streaming mode:");
3132
std::println("Question: Convince me to use modern C++ (100 words)\n");
3233

33-
client.user("Convince me to use modern C++ (100 words)");
34+
client.clear();
35+
client.system("You are a helpful assistant.");
3436
std::print("Answer: ");
35-
36-
client.request([](std::string_view chunk) {
37-
std::print("{}", chunk);
38-
std::cout.flush();
39-
});
40-
37+
auto resp2 = client.chat_stream("Convince me to use modern C++ (100 words)",
38+
[](std::string_view chunk) {
39+
std::print("{}", chunk);
40+
});
4141
std::println("\n");
42+
std::println("[Verification] Answer length: {} chars\n", resp2.text().size());
4243

43-
// Verify auto-save: get the last answer
44-
auto last_answer = client.getAnswer();
45-
std::println("[Verification] Last answer length: {} chars\n", last_answer.size());
46-
47-
// Example 3: Translate the story to Chinese
48-
std::println("[Example 3] Translation (streaming):");
49-
std::println("Question: 请把上个回答翻译成中文。\n");
50-
51-
client.user("请把上面的故事翻译成中文。");
52-
std::print("Answer: ");
53-
54-
client.request([](std::string_view chunk) {
55-
std::print("{}", chunk);
56-
std::cout.flush();
57-
});
58-
59-
std::println("\n");
60-
6144
} catch (const std::exception& e) {
6245
std::println("\nError: {}\n", e.what());
6346
return 1;
6447
}
6548

6649
std::println("=== Demo Complete ===");
6750
return 0;
68-
}
51+
}

examples/chat.cpp

Lines changed: 12 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,21 @@
11
// Simple and elegant AI chat CLI tool using streaming
2-
import std;
32
import mcpplibs.llmapi;
3+
import std;
44

5-
using namespace mcpplibs;
5+
using namespace mcpplibs::llmapi;
66

77
int main() {
8-
auto api_key = std::getenv("OPENAI_API_KEY");
9-
if (!api_key) {
8+
auto apiKey = std::getenv("OPENAI_API_KEY");
9+
if (!apiKey) {
1010
std::println("Error: OPENAI_API_KEY not set");
1111
return 1;
1212
}
1313

14-
llmapi::Client client(api_key, llmapi::URL::Poe);
15-
client.model("gpt-5").system("You are a helpful assistant.");
14+
auto client = Client(openai::OpenAI({
15+
.apiKey = apiKey,
16+
.model = "gpt-4o-mini",
17+
}));
18+
client.system("You are a helpful assistant.");
1619

1720
std::println("AI Chat CLI - Type 'quit' to exit\n");
1821

@@ -29,20 +32,16 @@ int main() {
2932
if (input.empty()) continue;
3033

3134
try {
32-
client.user(input);
3335
std::print("\nAI: ");
34-
35-
client.request([](std::string_view chunk) {
36+
client.chat_stream(input, [](std::string_view chunk) {
3637
std::print("{}", chunk);
37-
std::cout.flush();
3838
});
39-
4039
std::println("\n");
41-
40+
4241
} catch (const std::exception& e) {
4342
std::println("\nError: {}\n", e.what());
4443
}
4544
}
4645

4746
return 0;
48-
}
47+
}

examples/hello_mcpp.cpp

Lines changed: 16 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -1,19 +1,23 @@
11
// Minimal example - simplest way to use llmapi
2-
import std;
32
import mcpplibs.llmapi;
3+
import std;
4+
5+
using namespace mcpplibs::llmapi;
46

57
int main() {
6-
using namespace mcpplibs;
7-
8-
llmapi::Client client(std::getenv("OPENAI_API_KEY"), llmapi::URL::Poe);
8+
auto apiKey = std::getenv("OPENAI_API_KEY");
9+
if (!apiKey) {
10+
std::println("Error: OPENAI_API_KEY not set");
11+
return 1;
12+
}
13+
14+
auto client = Client(openai::OpenAI({
15+
.apiKey = apiKey,
16+
.model = "gpt-4o-mini",
17+
}));
918

10-
client.model("gpt-5")
11-
.system("You are a helpful assistant.")
12-
.user("In one sentence, introduce modern C++. 并给出中文翻译")
13-
.request([](std::string_view chunk) {
14-
std::print("{}", chunk);
15-
std::cout.flush();
16-
});
19+
auto resp = client.chat("Hello! In one sentence, introduce modern C++.");
20+
std::println("{}", resp.text());
1721

1822
return 0;
19-
}
23+
}

src/errors.cppm

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
export module mcpplibs.llmapi:errors;
2+
3+
import std;
4+
5+
export namespace mcpplibs::llmapi {
6+
7+
// Base API error with HTTP status and structured error info
8+
class ApiError : public std::runtime_error {
9+
public:
10+
int statusCode;
11+
std::string type;
12+
std::string body;
13+
14+
ApiError(int status, std::string errorType, std::string errorBody, const std::string& message)
15+
: std::runtime_error(message)
16+
, statusCode(status)
17+
, type(std::move(errorType))
18+
, body(std::move(errorBody))
19+
{}
20+
};
21+
22+
// Network/connection errors (DNS, TLS, timeout)
23+
class ConnectionError : public std::runtime_error {
24+
public:
25+
using std::runtime_error::runtime_error;
26+
};
27+
28+
} // namespace mcpplibs::llmapi

src/llmapi.cppm

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,7 @@ export import :provider;
77
export import :client;
88
export import :openai;
99
export import :anthropic;
10+
export import :errors;
1011

1112
import std;
1213

tests/llmapi/test_anthropic_live.cpp

Lines changed: 44 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,44 @@
1+
import mcpplibs.llmapi;
2+
import std;
3+
4+
#include <cassert>
5+
6+
using namespace mcpplibs::llmapi;
7+
8+
int main() {
9+
auto apiKey = std::getenv("ANTHROPIC_API_KEY");
10+
if (!apiKey) {
11+
std::println("ANTHROPIC_API_KEY not set, skipping live test");
12+
return 0;
13+
}
14+
15+
auto client = Client(anthropic::Anthropic({
16+
.apiKey = apiKey,
17+
.model = "claude-haiku-4-5-20251001",
18+
}));
19+
20+
// Test 1: basic chat
21+
auto resp = client.chat("Say exactly: HELLO_TEST_OK");
22+
std::println("Response: {}", resp.text());
23+
assert(!resp.text().empty());
24+
assert(resp.usage.inputTokens > 0);
25+
26+
// Test 2: system message
27+
client.clear();
28+
client.system("Always respond with exactly one word.");
29+
auto resp2 = client.chat("What color is the sky?");
30+
std::println("System test: {}", resp2.text());
31+
32+
// Test 3: streaming
33+
client.clear();
34+
std::string streamed;
35+
auto resp3 = client.chat_stream("Say exactly: STREAM_OK", [&](std::string_view chunk) {
36+
streamed += chunk;
37+
std::print("{}", chunk);
38+
});
39+
std::println("");
40+
assert(!streamed.empty());
41+
42+
std::println("test_anthropic_live: ALL PASSED");
43+
return 0;
44+
}

tests/llmapi/test_embeddings.cpp

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,32 @@
1+
import mcpplibs.llmapi;
2+
import std;
3+
4+
#include <cassert>
5+
6+
using namespace mcpplibs::llmapi;
7+
8+
int main() {
9+
auto apiKey = std::getenv("OPENAI_API_KEY");
10+
if (!apiKey) {
11+
std::println("OPENAI_API_KEY not set, skipping");
12+
return 0;
13+
}
14+
15+
auto provider = openai::OpenAI({
16+
.apiKey = apiKey,
17+
.model = "gpt-4o-mini",
18+
});
19+
20+
auto resp = provider.embed(
21+
{"Hello world", "How are you"},
22+
"text-embedding-3-small"
23+
);
24+
25+
assert(resp.embeddings.size() == 2);
26+
assert(!resp.embeddings[0].empty());
27+
assert(resp.usage.inputTokens > 0);
28+
std::println("Embedding dim: {}", resp.embeddings[0].size());
29+
30+
std::println("test_embeddings: ALL PASSED");
31+
return 0;
32+
}

0 commit comments

Comments
 (0)