Skip to content

Commit 342156e

Browse files
Peter Marreck
authored and committed
fix: use detected server URL for embedder, handle oMLX auth gracefully
probeOpenAI via /health sets model_available=false since auth/model can't be verified without credentials. Init and auto-index use the detected URL/dialect for HttpEmbedder instead of hardcoded defaults. oMLX detection now tells user to configure API key in config.ini.
1 parent 75aef28 commit 342156e

1 file changed

Lines changed: 19 additions & 10 deletions

File tree

src/main.zig

Lines changed: 19 additions & 10 deletions
Original file line number | Diff line number | Diff line change
@@ -288,11 +288,16 @@ pub fn main() !void {
288288
_ = stderr.flush() catch {};
289289
use_embeddings = true;
290290
} else {
291-
// Server found but model missing
292-
_ = stderr.print(" Found {s} on {s} but model '{s}' not installed.\n" ++
293-
" Run 'ollama pull {s}' then 'codescan index' for semantic search.\n", .{
294-
if (d.dialect == .ollama) "Ollama" else "oMLX", d.url, settings.embedding_model, settings.embedding_model,
295-
}) catch {};
291+
// Server found but model/auth not verified
292+
if (d.dialect == .ollama) {
293+
_ = stderr.print(" Found Ollama on {s} but model '{s}' not installed.\n" ++
294+
" Run 'ollama pull {s}' then 'codescan index' for semantic search.\n", .{
295+
d.url, settings.embedding_model, settings.embedding_model,
296+
}) catch {};
297+
} else {
298+
_ = stderr.print(" Found oMLX on {s}. Configure embedding_api_key in .codescan/config.ini\n" ++
299+
" then run 'codescan index' for semantic search.\n", .{d.url}) catch {};
300+
}
296301
_ = stderr.flush() catch {};
297302
_ = stderr.print(" Index in lexical-only mode? [Y/n] ", .{}) catch {};
298303
if (!promptYesNo(stderr, true)) {
@@ -325,11 +330,13 @@ pub fn main() !void {
325330
};
326331
}
327332

333+
const emb_url = if (detected) |d| d.url else settings.embedding_url;
334+
const emb_dialect = if (detected) |d| d.dialect else settings.embedding_dialect;
328335
var embedder_adapter = embedding.HttpEmbedder{
329336
.transport = http_client.transport(),
330-
.base_url = settings.embedding_url,
337+
.base_url = emb_url,
331338
.model = settings.embedding_model,
332-
.dialect = settings.embedding_dialect,
339+
.dialect = emb_dialect,
333340
.auth_header = settings.embedding_auth_header,
334341
};
335342
const active_embedder = if (use_embeddings)
@@ -650,11 +657,13 @@ pub fn main() !void {
650657
_ = stderr.flush() catch {};
651658
}
652659

660+
const emb_url = if (detected) |d| d.url else settings.embedding_url;
661+
const emb_dialect = if (detected) |d| d.dialect else settings.embedding_dialect;
653662
var embedder_adapter = embedding.HttpEmbedder{
654663
.transport = http_client.transport(),
655-
.base_url = settings.embedding_url,
664+
.base_url = emb_url,
656665
.model = settings.embedding_model,
657-
.dialect = settings.embedding_dialect,
666+
.dialect = emb_dialect,
658667
.auth_header = settings.embedding_auth_header,
659668
};
660669
const active_embedder = if (use_embeddings)
@@ -1780,7 +1789,7 @@ fn probeOpenAI(
17801789
return .{
17811790
.url = base_url,
17821791
.dialect = .openai,
1783-
.model_available = true,
1792+
.model_available = false, // /health confirms server exists but not auth/model
17841793
};
17851794
}
17861795
} else |_| {}

0 commit comments

Comments (0)