Skip to content

Commit 18c46cd

Browse files
committed
Feat: Native AI Connector (#19) - Local/Network Inference
- Added `NativeAIBridge` class using cpp-httplib and nlohmann/json.
- Implemented an OpenAI-compatible `/v1/chat/completions` client.
- Integrated into `Engine` via the `AIGateway` interface.
- Updated `StrategyInterface` (C ABI) to expose a `query_ai` callback.
- Strategies can now synchronously query local models (llama.cpp / Ollama / vLLM) directly from C++ logic.
- Configurable via `QUANUX_AI_ENDPOINT`, `QUANUX_AI_KEY`, `QUANUX_AI_MODEL`.
1 parent 728c5b7 commit 18c46cd

7 files changed

Lines changed: 189 additions & 3 deletions

File tree

Lines changed: 30 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,30 @@
1+
#pragma once
#include <functional>
#include <future>
#include <string>

namespace quanux::common {

/// Abstract gateway to an AI inference backend.
///
/// Implementations wrap a concrete transport (e.g. an HTTP client talking to
/// an OpenAI-compatible server) behind this minimal interface so the engine
/// and strategies stay decoupled from networking details.
class AIGateway {
public:
  virtual ~AIGateway() = default;

  /// Blocking prompt -> completion round-trip.
  ///
  /// Warning: this MAY BLOCK for a significant time. Prefer calling it from
  /// on_start or a dedicated thread, not a latency-sensitive path.
  virtual std::string query(const std::string &prompt) = 0;

  /// Non-blocking variant: returns a future that resolves to the completion.
  virtual std::future<std::string> query_async(const std::string &prompt) = 0;

  /// Reports whether the backend currently appears reachable.
  virtual bool is_connected() const = 0;
};

} // namespace quanux::common

QuanuX-Common/cpp/include/quanux/common/StrategyInterface.h

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -50,6 +50,9 @@ struct OrderService {
5050
void *engine_ctx;
5151
uint64_t (*submit_order)(void *engine_ctx, const OrderRequest *request);
5252
void (*cancel_order)(void *engine_ctx, uint64_t order_id);
53+
// Returns true if successful, false if buffer too small or error
54+
bool (*query_ai)(void *engine_ctx, const char *prompt, char *buffer,
55+
uint32_t buffer_size);
5356
};
5457

5558
// V-Table for Strategy instance

execution-node/cpp/CMakeLists.txt

Lines changed: 19 additions & 1 deletion
Original file line numberDiff line numberDiff line change
# Include Directories
include_directories(include)
include_directories(../../QuanuX-Common/cpp/include)
include_directories(${CMAKE_BINARY_DIR}/_deps/cpp-httplib-src)

# Indicators Library (Self-Contained Strategy Support)
# We pull this in so strategies can compute locally without phoning home.
set(BUILD_PYTHON_BINDINGS OFF CACHE BOOL "" FORCE)
add_subdirectory(../../server/indicators ${CMAKE_BINARY_DIR}/indicators)

# --- AI Connector Deps ---
FetchContent_Declare(
  nlohmann_json
  GIT_REPOSITORY https://github.com/nlohmann/json.git
  GIT_TAG v3.11.3
)
FetchContent_MakeAvailable(nlohmann_json)

FetchContent_Declare(
  cpp-httplib
  GIT_REPOSITORY https://github.com/yhirose/cpp-httplib.git
  GIT_TAG v0.14.3
)
FetchContent_MakeAvailable(cpp-httplib)

# Core Library (The Engine)
add_library(node_core STATIC
  src/engine.cpp
  src/nats_bridge.cpp
  src/market_data_engine.cpp
  src/order_gateway.cpp
  src/native_ai_bridge.cpp
)
# FIX: cpp-httplib's CMake project exports the target `httplib` (alias
# `httplib::httplib`); there is no target named `cpp-httplib`, so linking that
# name degrades to a raw `-lcpp-httplib` flag and fails at link time.
target_link_libraries(node_core PUBLIC nats_static pthread dl
                      httplib::httplib nlohmann_json::nlohmann_json)

# Main Executable
add_executable(quanux_node src/main.cpp)

execution-node/cpp/include/engine.h

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,6 @@
11
#pragma once
22
#include "market_data_engine.h"
3+
#include "native_ai_bridge.h"
34
#include "nats_bridge.h"
45
#include "order_gateway.h"
56
#include "plugin_loader.h"
@@ -19,6 +20,7 @@ struct LoadedStrategy {
1920

2021
class Engine {
2122
RingBuffer<MarketUpdate, 1024> ring_buffer_;
23+
NativeAIBridge ai_bridge_;
2224
NatsBridge nats_bridge_;
2325
MarketDataEngine market_data_engine_;
2426
OrderGateway order_gateway_;
@@ -27,6 +29,8 @@ class Engine {
2729
// Static callbacks for Strategy ABI
2830
static uint64_t static_submit_order(void *ctx, const OrderRequest *request);
2931
static void static_cancel_order(void *ctx, uint64_t order_id);
32+
static bool static_query_ai(void *ctx, const char *prompt, char *buffer,
33+
uint32_t buffer_size);
3034

3135
public:
3236
Engine();
Lines changed: 21 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,21 @@
1+
#pragma once
#include "quanux/common/AIGateway.h"
#include <memory>
#include <string>

/// HTTP-backed AIGateway implementation for OpenAI-compatible servers
/// (llama.cpp server, Ollama, vLLM, ...). Uses the pimpl idiom so the HTTP
/// client dependency stays out of this header.
class NativeAIBridge : public quanux::common::AIGateway {
public:
  /// @param endpoint Base URL of the inference server, e.g. "http://localhost:8080"
  /// @param api_key  Bearer token sent with each request (may be empty)
  /// @param model    Model identifier, e.g. "llama-3-8b"
  NativeAIBridge(const std::string &endpoint, const std::string &api_key,
                 const std::string &model);
  ~NativeAIBridge() override;

  std::string query(const std::string &prompt) override;
  std::future<std::string> query_async(const std::string &prompt) override;
  bool is_connected() const override;

private:
  struct Impl;
  // NOTE(review): shared_ptr rather than unique_ptr — presumably so Impl can
  // be kept alive across async work; confirm, else unique_ptr would suffice.
  std::shared_ptr<Impl> impl_;
};

execution-node/cpp/src/engine.cpp

Lines changed: 27 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,6 +6,13 @@
66

77
// Reads environment variable `name`, falling back to `fallback` when unset.
// Avoids the original pattern of calling std::getenv twice per variable.
static std::string env_or(const char *name, const char *fallback) {
  const char *value = std::getenv(name);
  return value ? std::string(value) : std::string(fallback);
}

Engine::Engine()
    : // ring_buffer_ default constructor is used (fixed size)
      // Retrieve AI Config from Env or Default
      ai_bridge_(env_or("QUANUX_AI_ENDPOINT", "http://localhost:8080"),
                 env_or("QUANUX_AI_KEY", ""),
                 env_or("QUANUX_AI_MODEL", "llama3")),
      nats_bridge_("nats://localhost:4222"), // Default NATS URL
      market_data_engine_(&ring_buffer_, &nats_bridge_),
      order_gateway_(&nats_bridge_) {}
@@ -20,8 +27,23 @@ void Engine::static_cancel_order(void *ctx, uint64_t order_id) {
2027
engine->order_gateway_.cancel_order(order_id);
2128
}
2229

30+
// C-ABI trampoline exposed to strategies via OrderService::query_ai.
// Runs a BLOCKING AI query and copies the NUL-terminated response into
// `buffer`. Returns true on success, false on bad arguments or if the
// response (plus terminator) does not fit in `buffer_size` bytes.
bool Engine::static_query_ai(void *ctx, const char *prompt, char *buffer,
                             uint32_t buffer_size) {
  // Defensive guards: this crosses the C ABI, so validate every pointer.
  if (ctx == nullptr || prompt == nullptr || buffer == nullptr ||
      buffer_size == 0) {
    return false;
  }
  Engine *engine = static_cast<Engine *>(ctx);
  // Blocking query — may stall the caller for the full HTTP timeout.
  const std::string response = engine->ai_bridge_.query(prompt);
  if (response.size() >= buffer_size) {
    return false; // Buffer too small (need room for the NUL terminator)
  }
  // memcpy with an explicit, already-validated length instead of strcpy.
  std::memcpy(buffer, response.c_str(), response.size() + 1);
  return true;
}
41+
2342
void Engine::init(const std::string &config_path) {
2443
std::cout << "[Engine] Initializing..." << std::endl;
44+
std::cout << "[Engine] AI Bridge Connected: "
45+
<< (ai_bridge_.is_connected() ? "YES" : "NO") << std::endl;
46+
2547
// In a real app, read config.json
2648

2749
// Determine platform-specific extension
@@ -81,7 +103,8 @@ void Engine::load_strategy(const std::string &strategy_path) {
81103
if (strategy_ptr->on_init) {
82104
OrderService service = {.engine_ctx = this,
83105
.submit_order = static_submit_order,
84-
.cancel_order = static_cancel_order};
106+
.cancel_order = static_cancel_order,
107+
.query_ai = static_query_ai};
85108
strategy_ptr->on_init(ctx, &service);
86109
}
87110

@@ -113,7 +136,9 @@ void Engine::run() {
113136
if (p_pos != std::string::npos && s_pos != std::string::npos) {
114137
double price = std::stod(msg.substr(p_pos + 9));
115138
double size = std::stod(msg.substr(s_pos + 8));
116-
bool is_trade = (msg.find("trade") != std::string::npos);
139+
bool is_trade =
140+
(msg.find("trade") !=
141+
std::string::npos); // Correct logic? msg has "type": "trade"
117142

118143
MarketUpdate update = {
119144
.timestamp = 0, // todo
Lines changed: 85 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,85 @@
1+
#include "native_ai_bridge.h"
2+
#include <httplib.h>
3+
#include <iostream>
4+
#include <nlohmann/json.hpp>
5+
6+
using json = nlohmann::json;
7+
8+
// Private implementation: owns the HTTP client plus connection settings.
struct NativeAIBridge::Impl {
  std::string base_url;
  std::string api_key;
  std::string model;
  std::unique_ptr<httplib::Client> client;

  Impl(const std::string &url, const std::string &key, const std::string &mdl)
      : base_url(url), api_key(key), model(mdl),
        // httplib::Client accepts a full "http://host:port" URL directly.
        client(std::make_unique<httplib::Client>(url.c_str())) {
    client->set_connection_timeout(5, 0); // 5s connection timeout
    client->set_read_timeout(10, 0);      // 10s read timeout (fast inference required)
  }
};
24+
25+
// Constructs the bridge; the pimpl owns the HTTP client and its timeouts.
NativeAIBridge::NativeAIBridge(const std::string &endpoint,
                               const std::string &api_key,
                               const std::string &model)
    : impl_(std::make_shared<Impl>(endpoint, api_key, model)) {}

// Out-of-line `= default`: the header only forward-declares Impl, so the
// destructor must be emitted here where Impl is a complete type.
NativeAIBridge::~NativeAIBridge() = default;
31+
32+
std::string NativeAIBridge::query(const std::string &prompt) {
33+
json payload = {
34+
{"model", impl_->model},
35+
{"messages", {{{"role", "user"}, {"content", prompt}}}},
36+
{"max_tokens", 100}, // Limit response size for speed
37+
{"temperature", 0.0} // Deterministic
38+
};
39+
40+
httplib::Headers headers = {{"Content-Type", "application/json"},
41+
{"Authorization", "Bearer " + impl_->api_key}};
42+
43+
auto res = impl_->client->Post("/v1/chat/completions", headers,
44+
payload.dump(), "application/json");
45+
46+
if (res && res->status == 200) {
47+
try {
48+
auto response_json = json::parse(res->body);
49+
if (response_json.contains("choices") &&
50+
!response_json["choices"].empty()) {
51+
return response_json["choices"][0]["message"]["content"]
52+
.get<std::string>();
53+
}
54+
} catch (const std::exception &e) {
55+
std::cerr << "[AIBridge] JSON Parse Error: " << e.what() << std::endl;
56+
return "Error: Parse Failure";
57+
}
58+
} else {
59+
if (res) {
60+
std::cerr << "[AIBridge] HTTP Error: " << res->status
61+
<< " Body: " << res->body << std::endl;
62+
return "Error: HTTP " + std::to_string(res->status);
63+
} else {
64+
std::cerr << "[AIBridge] Connection Failed: " << to_string(res.error())
65+
<< std::endl;
66+
return "Error: Connection Failed";
67+
}
68+
}
69+
return "Error: Unknown";
70+
}
71+
72+
std::future<std::string>
73+
NativeAIBridge::query_async(const std::string &prompt) {
74+
// Simple std::async wrapper for now.
75+
// In production, this should use a thread pool or httplib's async features if
76+
// available (httplib is blocking sync usually).
77+
return std::async(std::launch::async,
78+
[this, prompt]() { return this->query(prompt); });
79+
}
80+
81+
bool NativeAIBridge::is_connected() const {
82+
// Simple health check
83+
auto res = impl_->client->Get("/v1/models");
84+
return (res && res->status == 200);
85+
}

0 commit comments

Comments
 (0)