Skip to content

Commit 769d49c

Browse files
authored
Merge pull request #1011 from constructive-io/devin/1776586895-graphile-llm-getenv
refactor: replace process.env with getEnvOptions() in graphile-llm
2 parents e52a76e + 1d714a7 commit 769d49c

8 files changed

Lines changed: 2679 additions & 7436 deletions

File tree

graphile/graphile-llm/package.json

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,8 @@
2929
"url": "https://github.com/constructive-io/constructive/issues"
3030
},
3131
"dependencies": {
32-
"@agentic-kit/ollama": "^1.0.3"
32+
"@agentic-kit/ollama": "^1.0.3",
33+
"@constructive-io/graphql-env": "workspace:^"
3334
},
3435
"peerDependencies": {
3536
"@dataplan/pg": "1.0.0",

graphile/graphile-llm/src/chat.ts

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -13,6 +13,7 @@
1313
*/
1414

1515
import OllamaClient from '@agentic-kit/ollama';
16+
import { getEnvOptions } from '@constructive-io/graphql-env';
1617
import type { ChatConfig, ChatFunction, ChatMessage, ChatOptions, LlmModuleData } from './types';
1718

1819
// ─── Built-in Providers ─────────────────────────────────────────────────────
@@ -95,21 +96,22 @@ export function buildChatCompleterFromModule(data: LlmModuleData): ChatFunction
9596
}
9697

9798
/**
98-
* Resolve a chat completer from environment variables.
99+
* Resolve a chat completer from environment variables via getEnvOptions().
99100
* This is a fallback for development when no llm_module or defaultChatCompleter is configured.
100101
*
101-
* Environment variables:
102+
* Environment variables (parsed by @constructive-io/graphql-env):
102103
* CHAT_PROVIDER - Provider name ('ollama')
103104
* CHAT_MODEL - Model identifier (e.g. 'llama3')
104105
* CHAT_BASE_URL - Provider base URL
105106
*/
106107
export function buildChatCompleterFromEnv(): ChatFunction | null {
107-
const provider = process.env.CHAT_PROVIDER;
108+
const { llm } = getEnvOptions();
109+
const provider = llm?.chat?.provider;
108110
if (!provider) return null;
109111

110112
return buildChatCompleter({
111113
provider,
112-
model: process.env.CHAT_MODEL,
113-
baseUrl: process.env.CHAT_BASE_URL,
114+
model: llm?.chat?.model,
115+
baseUrl: llm?.chat?.baseUrl,
114116
});
115117
}

graphile/graphile-llm/src/embedder.ts

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@
1111
*/
1212

1313
import OllamaClient from '@agentic-kit/ollama';
14+
import { getEnvOptions } from '@constructive-io/graphql-env';
1415
import type { EmbedderConfig, EmbedderFunction, LlmModuleData } from './types';
1516

1617
// ─── Built-in Providers ─────────────────────────────────────────────────────
@@ -63,21 +64,22 @@ export function buildEmbedderFromModule(data: LlmModuleData): EmbedderFunction |
6364
}
6465

6566
/**
66-
* Resolve an embedder from environment variables.
67+
* Resolve an embedder from environment variables via getEnvOptions().
6768
* This is a fallback for development when no llm_module or defaultEmbedder is configured.
6869
*
69-
* Environment variables:
70+
* Environment variables (parsed by @constructive-io/graphql-env):
7071
* EMBEDDER_PROVIDER - Provider name ('ollama')
7172
* EMBEDDER_MODEL - Model identifier
7273
* EMBEDDER_BASE_URL - Provider base URL
7374
*/
7475
export function buildEmbedderFromEnv(): EmbedderFunction | null {
75-
const provider = process.env.EMBEDDER_PROVIDER;
76+
const { llm } = getEnvOptions();
77+
const provider = llm?.embedder?.provider;
7678
if (!provider) return null;
7779

7880
return buildEmbedder({
7981
provider,
80-
model: process.env.EMBEDDER_MODEL,
81-
baseUrl: process.env.EMBEDDER_BASE_URL,
82+
model: llm?.embedder?.model,
83+
baseUrl: llm?.embedder?.baseUrl,
8284
});
8385
}

graphql/env/src/env.ts

Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -27,6 +27,13 @@ export const getGraphQLEnvVars = (env: NodeJS.ProcessEnv = process.env): Partial
2727
API_ANON_ROLE,
2828
API_ROLE_NAME,
2929
API_DEFAULT_DATABASE_ID,
30+
31+
EMBEDDER_PROVIDER,
32+
EMBEDDER_MODEL,
33+
EMBEDDER_BASE_URL,
34+
CHAT_PROVIDER,
35+
CHAT_MODEL,
36+
CHAT_BASE_URL,
3037
} = env;
3138

3239
return {
@@ -51,5 +58,23 @@ export const getGraphQLEnvVars = (env: NodeJS.ProcessEnv = process.env): Partial
5158
...(API_ROLE_NAME && { roleName: API_ROLE_NAME }),
5259
...(API_DEFAULT_DATABASE_ID && { defaultDatabaseId: API_DEFAULT_DATABASE_ID }),
5360
},
61+
...((EMBEDDER_PROVIDER || CHAT_PROVIDER) && {
62+
llm: {
63+
...((EMBEDDER_PROVIDER || EMBEDDER_MODEL || EMBEDDER_BASE_URL) && {
64+
embedder: {
65+
...(EMBEDDER_PROVIDER && { provider: EMBEDDER_PROVIDER }),
66+
...(EMBEDDER_MODEL && { model: EMBEDDER_MODEL }),
67+
...(EMBEDDER_BASE_URL && { baseUrl: EMBEDDER_BASE_URL }),
68+
},
69+
}),
70+
...((CHAT_PROVIDER || CHAT_MODEL || CHAT_BASE_URL) && {
71+
chat: {
72+
...(CHAT_PROVIDER && { provider: CHAT_PROVIDER }),
73+
...(CHAT_MODEL && { model: CHAT_MODEL }),
74+
...(CHAT_BASE_URL && { baseUrl: CHAT_BASE_URL }),
75+
},
76+
}),
77+
},
78+
}),
5479
};
5580
};

graphql/types/src/constructive.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@ import {
1818
graphileFeatureDefaults,
1919
apiDefaults
2020
} from './graphile';
21+
import { LlmOptions } from './llm';
2122

2223
/**
2324
* GraphQL-specific options for Constructive
@@ -56,6 +57,8 @@ export interface ConstructiveOptions extends PgpmOptions, ConstructiveGraphQLOpt
5657
migrations?: MigrationOptions;
5758
/** Job system configuration */
5859
jobs?: JobsConfig;
60+
/** LLM provider configuration (embeddings, chat, RAG) */
61+
llm?: LlmOptions;
5962
}
6063

6164
/**

graphql/types/src/index.ts

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -22,3 +22,10 @@ export {
2222
GraphQLError,
2323
QueryResult
2424
} from './adapter';
25+
26+
// Export LLM types
27+
export {
28+
LlmOptions,
29+
LlmEmbedderOptions,
30+
LlmChatOptions
31+
} from './llm';

graphql/types/src/llm.ts

Lines changed: 48 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,48 @@
1+
/**
2+
* LLM provider configuration options.
3+
*
4+
* Used by graphile-llm to resolve embedding and chat completion providers
5+
* from the unified environment configuration system (getEnvOptions).
6+
*/
7+
8+
/**
9+
* Configuration for an LLM embedding provider.
10+
*/
11+
export interface LlmEmbedderOptions {
12+
/** Provider name (e.g. 'ollama') */
13+
provider?: string;
14+
/** Model identifier (e.g. 'nomic-embed-text') */
15+
model?: string;
16+
/** Provider base URL (e.g. 'http://localhost:11434') */
17+
baseUrl?: string;
18+
}
19+
20+
/**
21+
* Configuration for an LLM chat completion provider.
22+
*/
23+
export interface LlmChatOptions {
24+
/** Provider name (e.g. 'ollama') */
25+
provider?: string;
26+
/** Model identifier (e.g. 'llama3') */
27+
model?: string;
28+
/** Provider base URL (e.g. 'http://localhost:11434') */
29+
baseUrl?: string;
30+
}
31+
32+
/**
33+
* Top-level LLM configuration options.
34+
*
35+
* Environment variables:
36+
* EMBEDDER_PROVIDER - Embedding provider name
37+
* EMBEDDER_MODEL - Embedding model identifier
38+
* EMBEDDER_BASE_URL - Embedding provider base URL
39+
* CHAT_PROVIDER - Chat completion provider name
40+
* CHAT_MODEL - Chat completion model identifier
41+
* CHAT_BASE_URL - Chat completion provider base URL
42+
*/
43+
export interface LlmOptions {
44+
/** Embedding provider configuration */
45+
embedder?: LlmEmbedderOptions;
46+
/** Chat completion provider configuration */
47+
chat?: LlmChatOptions;
48+
}

0 commit comments

Comments (0)