Skip to content

Commit e98a279

Browse files
committed
refactor(test): remove graceful skipping, use @agentic-kit/ollama instead of raw fetch
- Tests now WILL fail if PostgreSQL or Ollama are unavailable (no more silent skipping)
- Replaced raw fetch() calls with OllamaClient from @agentic-kit/ollama (listModels, pullModel, generateEmbedding)
- Added direct OllamaClient.generateEmbedding test
- Stricter assertions on mutation input types (expect defined, not soft if-checks)
1 parent 3cb942e commit e98a279

1 file changed

Lines changed: 87 additions & 155 deletions

File tree

graphile/graphile-llm/src/__tests__/graphile-llm.test.ts

Lines changed: 87 additions & 155 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import { join } from 'path';
2+
import OllamaClient from '@agentic-kit/ollama';
23
import { getConnections, seed } from 'graphile-test';
34
import type { GraphQLResponse } from 'graphile-test';
45
import type { PgTestClient } from 'pgsql-test';
@@ -14,39 +15,18 @@ import {
1415
buildEmbedderFromModule,
1516
buildEmbedderFromEnv,
1617
} from '../../src/embedder';
17-
import type { EmbedderConfig, LlmModuleData } from '../../src/types';
18+
import type { LlmModuleData } from '../../src/types';
1819

19-
// ─── Ollama helpers (same pattern as cli-e2e.test.ts) ────────────────────────
20+
// ─── @agentic-kit/ollama client ─────────────────────────────────────────────
2021

21-
async function isOllamaAvailable(): Promise<boolean> {
22-
try {
23-
const res = await fetch('http://localhost:11434/api/tags');
24-
return res.ok;
25-
} catch {
26-
return false;
27-
}
28-
}
29-
30-
async function ensureNomicModel(): Promise<boolean> {
31-
try {
32-
const res = await fetch('http://localhost:11434/api/tags');
33-
if (!res.ok) return false;
34-
const data = (await res.json()) as { models?: Array<{ name: string }> };
35-
const models = data.models ?? [];
36-
const hasModel = models.some((m: { name: string }) =>
37-
m.name.includes('nomic-embed-text'),
38-
);
39-
if (hasModel) return true;
22+
const ollamaClient = new OllamaClient('http://localhost:11434');
4023

24+
async function ensureNomicModel(): Promise<void> {
25+
const models = await ollamaClient.listModels();
26+
const hasModel = models.some((m: string) => m.includes('nomic-embed-text'));
27+
if (!hasModel) {
4128
console.log('Pulling nomic-embed-text model...');
42-
const pullRes = await fetch('http://localhost:11434/api/pull', {
43-
method: 'POST',
44-
headers: { 'Content-Type': 'application/json' },
45-
body: JSON.stringify({ name: 'nomic-embed-text' }),
46-
});
47-
return pullRes.ok;
48-
} catch {
49-
return false;
29+
await ollamaClient.pullModel('nomic-embed-text');
5030
}
5131
}
5232

@@ -138,69 +118,61 @@ describe('Embedder abstraction', () => {
138118

139119
// =============================================================================
140120
// Suite 2: Schema enrichment — plugin adds text fields to GraphQL schema
121+
// Requires PostgreSQL + pgvector. Tests WILL fail if database is unavailable.
141122
// =============================================================================
142123

143124
describe('graphile-llm schema enrichment', () => {
144125
let db: PgTestClient;
145126
let teardown: () => Promise<void>;
146127
let query: QueryFn;
147-
let pgReady = false;
148128

149129
beforeAll(async () => {
150-
try {
151-
const unifiedPlugin = createUnifiedSearchPlugin({
152-
adapters: [createPgvectorAdapter()],
153-
});
154-
155-
const testPreset = {
156-
extends: [ConnectionFilterPreset()],
157-
plugins: [
158-
// Search infrastructure (provides VectorNearbyInput)
159-
VectorCodecPlugin,
160-
unifiedPlugin,
161-
// LLM plugins under test
162-
createLlmModulePlugin({
163-
defaultEmbedder: {
164-
provider: 'ollama',
165-
model: 'nomic-embed-text',
166-
baseUrl: 'http://localhost:11434',
167-
},
168-
}),
169-
createLlmTextSearchPlugin(),
170-
createLlmTextMutationPlugin(),
171-
],
172-
};
130+
const unifiedPlugin = createUnifiedSearchPlugin({
131+
adapters: [createPgvectorAdapter()],
132+
});
173133

174-
const connections = await getConnections(
175-
{
176-
schemas: ['llm_test'],
177-
preset: testPreset,
178-
useRoot: true,
179-
authRole: 'postgres',
180-
},
181-
[seed.sqlfile([join(__dirname, './setup.sql')])],
182-
);
134+
const testPreset = {
135+
extends: [ConnectionFilterPreset()],
136+
plugins: [
137+
// Search infrastructure (provides VectorNearbyInput)
138+
VectorCodecPlugin,
139+
unifiedPlugin,
140+
// LLM plugins under test
141+
createLlmModulePlugin({
142+
defaultEmbedder: {
143+
provider: 'ollama',
144+
model: 'nomic-embed-text',
145+
baseUrl: 'http://localhost:11434',
146+
},
147+
}),
148+
createLlmTextSearchPlugin(),
149+
createLlmTextMutationPlugin(),
150+
],
151+
};
152+
153+
const connections = await getConnections(
154+
{
155+
schemas: ['llm_test'],
156+
preset: testPreset,
157+
useRoot: true,
158+
authRole: 'postgres',
159+
},
160+
[seed.sqlfile([join(__dirname, './setup.sql')])],
161+
);
183162

184-
db = connections.db;
185-
teardown = connections.teardown;
186-
query = connections.query;
187-
pgReady = true;
163+
db = connections.db;
164+
teardown = connections.teardown;
165+
query = connections.query;
188166

189-
await db.client.query('BEGIN');
190-
} catch (err) {
191-
console.log(
192-
'PostgreSQL not available — skipping schema enrichment tests. Error:',
193-
(err as Error).message,
194-
);
195-
}
167+
await db.client.query('BEGIN');
196168
});
197169

198170
afterAll(async () => {
199171
if (db) {
200172
try {
201173
await db.client.query('ROLLBACK');
202174
} catch {
203-
// Ignore rollback errors
175+
// Ignore rollback errors during cleanup
204176
}
205177
}
206178
if (teardown) {
@@ -209,22 +181,17 @@ describe('graphile-llm schema enrichment', () => {
209181
});
210182

211183
beforeEach(async () => {
212-
if (db) await db.beforeEach();
184+
await db.beforeEach();
213185
});
214186

215187
afterEach(async () => {
216-
if (db) await db.afterEach();
188+
await db.afterEach();
217189
});
218190

219191
// ─── VectorNearbyInput text field ────────────────────────────────────────
220192

221193
describe('VectorNearbyInput text field', () => {
222194
it('adds text field to VectorNearbyInput type', async () => {
223-
if (!pgReady) {
224-
console.log('PostgreSQL not available — skipping');
225-
return;
226-
}
227-
228195
const result = await query<{
229196
__type: { inputFields: Array<{ name: string; type: { name: string } }> };
230197
}>(`
@@ -256,11 +223,6 @@ describe('graphile-llm schema enrichment', () => {
256223
});
257224

258225
it('still allows vector-based queries (existing behavior unchanged)', async () => {
259-
if (!pgReady) {
260-
console.log('PostgreSQL not available — skipping');
261-
return;
262-
}
263-
264226
const result = await query<{
265227
allArticles: { nodes: Array<{ title: string; embeddingVectorDistance: number }> };
266228
}>(`
@@ -295,11 +257,6 @@ describe('graphile-llm schema enrichment', () => {
295257

296258
describe('Mutation text companion fields', () => {
297259
it('adds embeddingText field to CreateArticleInput', async () => {
298-
if (!pgReady) {
299-
console.log('PostgreSQL not available — skipping');
300-
return;
301-
}
302-
303260
const result = await query<{
304261
__type: { inputFields: Array<{ name: string; type: { name: string } }> };
305262
}>(`
@@ -315,30 +272,22 @@ describe('graphile-llm schema enrichment', () => {
315272

316273
expect(result.errors).toBeUndefined();
317274
const inputType = result.data?.__type;
318-
// CreateArticleInput may or may not exist depending on schema config
319-
// If it exists, verify the text companion field
320-
if (inputType) {
321-
const fieldNames = inputType.inputFields.map((f) => f.name);
322-
// Original embedding field
323-
expect(fieldNames).toContain('embedding');
324-
// Companion text field from LlmTextMutationPlugin
325-
expect(fieldNames).toContain('embeddingText');
326-
327-
const textField = inputType.inputFields.find(
328-
(f) => f.name === 'embeddingText',
329-
);
330-
if (textField) {
331-
expect(textField.type.name).toBe('String');
332-
}
333-
}
275+
expect(inputType).toBeDefined();
276+
277+
const fieldNames = inputType!.inputFields.map((f) => f.name);
278+
// Original embedding field
279+
expect(fieldNames).toContain('embedding');
280+
// Companion text field from LlmTextMutationPlugin
281+
expect(fieldNames).toContain('embeddingText');
282+
283+
const textField = inputType!.inputFields.find(
284+
(f) => f.name === 'embeddingText',
285+
);
286+
expect(textField).toBeDefined();
287+
expect(textField!.type.name).toBe('String');
334288
});
335289

336290
it('adds embeddingText field to UpdateArticleInput (patch)', async () => {
337-
if (!pgReady) {
338-
console.log('PostgreSQL not available — skipping');
339-
return;
340-
}
341-
342291
const result = await query<{
343292
__type: { inputFields: Array<{ name: string; type: { name: string } }> };
344293
}>(`
@@ -354,41 +303,31 @@ describe('graphile-llm schema enrichment', () => {
354303

355304
expect(result.errors).toBeUndefined();
356305
const inputType = result.data?.__type;
357-
if (inputType) {
358-
const fieldNames = inputType.inputFields.map((f) => f.name);
359-
expect(fieldNames).toContain('embeddingText');
360-
361-
const textField = inputType.inputFields.find(
362-
(f) => f.name === 'embeddingText',
363-
);
364-
if (textField) {
365-
expect(textField.type.name).toBe('String');
366-
}
367-
}
306+
expect(inputType).toBeDefined();
307+
308+
const fieldNames = inputType!.inputFields.map((f) => f.name);
309+
expect(fieldNames).toContain('embeddingText');
310+
311+
const textField = inputType!.inputFields.find(
312+
(f) => f.name === 'embeddingText',
313+
);
314+
expect(textField).toBeDefined();
315+
expect(textField!.type.name).toBe('String');
368316
});
369317
});
370318
});
371319

372320
// =============================================================================
373-
// Suite 3: Real Ollama embedding (skips if Ollama is not available)
321+
// Suite 3: Real Ollama embedding via @agentic-kit/ollama
322+
// Requires Ollama running with nomic-embed-text. Tests WILL fail if unavailable.
374323
// =============================================================================
375324

376325
describe('graphile-llm with real Ollama embedding', () => {
377-
let ollamaReady = false;
378-
379326
beforeAll(async () => {
380-
const ollamaUp = await isOllamaAvailable();
381-
if (ollamaUp) {
382-
ollamaReady = await ensureNomicModel();
383-
}
327+
await ensureNomicModel();
384328
});
385329

386330
it('should embed text to a real vector via Ollama nomic-embed-text', async () => {
387-
if (!ollamaReady) {
388-
console.log('Ollama not available — skipping real embedding test');
389-
return;
390-
}
391-
392331
const embedder = buildEmbedder({
393332
provider: 'ollama',
394333
model: 'nomic-embed-text',
@@ -413,17 +352,9 @@ describe('graphile-llm with real Ollama embedding', () => {
413352
const magnitude = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0));
414353
expect(magnitude).toBeGreaterThan(0);
415354

416-
console.log(
417-
`[graphile-llm test] Embedded text to ${vector.length}-dim vector (magnitude: ${magnitude.toFixed(4)})`,
418-
);
419355
});
420356

421357
it('should produce different vectors for semantically different text', async () => {
422-
if (!ollamaReady) {
423-
console.log('Ollama not available — skipping semantic difference test');
424-
return;
425-
}
426-
427358
const embedder = buildEmbedder({
428359
provider: 'ollama',
429360
model: 'nomic-embed-text',
@@ -452,20 +383,10 @@ describe('graphile-llm with real Ollama embedding', () => {
452383
const cosineSimilarity = dotProduct / (Math.sqrt(normA) * Math.sqrt(normB));
453384

454385
// Semantically different texts should have lower similarity
455-
// (not identical vectors)
456386
expect(cosineSimilarity).toBeLessThan(0.95);
457-
458-
console.log(
459-
`[graphile-llm test] Cosine similarity between different topics: ${cosineSimilarity.toFixed(4)}`,
460-
);
461387
});
462388

463389
it('should produce similar vectors for semantically similar text', async () => {
464-
if (!ollamaReady) {
465-
console.log('Ollama not available — skipping semantic similarity test');
466-
return;
467-
}
468-
469390
const embedder = buildEmbedder({
470391
provider: 'ollama',
471392
model: 'nomic-embed-text',
@@ -495,9 +416,20 @@ describe('graphile-llm with real Ollama embedding', () => {
495416

496417
// Semantically similar texts should have high similarity
497418
expect(cosineSimilarity).toBeGreaterThan(0.5);
419+
});
498420

499-
console.log(
500-
`[graphile-llm test] Cosine similarity between similar topics: ${cosineSimilarity.toFixed(4)}`,
421+
it('should produce embeddings via @agentic-kit/ollama OllamaClient directly', async () => {
422+
const vector = await ollamaClient.generateEmbedding(
423+
'Testing the agentic-kit Ollama client directly',
424+
'nomic-embed-text',
501425
);
426+
427+
expect(Array.isArray(vector)).toBe(true);
428+
expect(vector.length).toBe(768);
429+
430+
for (const v of vector) {
431+
expect(typeof v).toBe('number');
432+
expect(Number.isFinite(v)).toBe(true);
433+
}
502434
});
503435
});

0 commit comments

Comments (0)