```diff
@@ -13,6 +13,7 @@
  */

 import OllamaClient from '@agentic-kit/ollama';
+import { getEnvOptions } from '@constructive-io/graphql-env';
 import type { ChatConfig, ChatFunction, ChatMessage, ChatOptions, LlmModuleData } from './types';

 // ─── Built-in Providers ─────────────────────────────────────────────────────
@@ -95,21 +96,22 @@ export function buildChatCompleterFromModule(data: LlmModuleData): ChatFunction
 }

 /**
- * Resolve a chat completer from environment variables.
+ * Resolve a chat completer from environment variables via getEnvOptions().
  * This is a fallback for development when no llm_module or defaultChatCompleter is configured.
  *
- * Environment variables:
+ * Environment variables (parsed by @constructive-io/graphql-env):
  * CHAT_PROVIDER - Provider name ('ollama')
  * CHAT_MODEL - Model identifier (e.g. 'llama3')
  * CHAT_BASE_URL - Provider base URL
  */
 export function buildChatCompleterFromEnv(): ChatFunction | null {
-  const provider = process.env.CHAT_PROVIDER;
+  const { llm } = getEnvOptions();
+  const provider = llm?.chat?.provider;
   if (!provider) return null;

   return buildChatCompleter({
     provider,
-    model: process.env.CHAT_MODEL,
-    baseUrl: process.env.CHAT_BASE_URL,
+    model: llm?.chat?.model,
+    baseUrl: llm?.chat?.baseUrl,
   });
 }
```
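
For reviewers: a minimal sketch of the nested option shape this change assumes `getEnvOptions()` to return, inferred purely from the `llm?.chat?.provider` / `model` / `baseUrl` accesses in the diff rather than from the actual `@constructive-io/graphql-env` typings, followed by a hypothetical call site (`./chat` is an assumed module path).

```ts
// Assumed shape only: inferred from the optional-chained accesses in the
// diff above; the real @constructive-io/graphql-env types may differ.
interface AssumedEnvOptions {
  llm?: {
    chat?: {
      provider?: string; // presumably parsed from CHAT_PROVIDER
      model?: string;    // presumably parsed from CHAT_MODEL
      baseUrl?: string;  // presumably parsed from CHAT_BASE_URL
    };
  };
}

// Hypothetical call site ('./chat' is an assumed module path). When
// CHAT_PROVIDER is unset the fallback returns null, so callers must
// handle the missing-configuration case explicitly.
import { buildChatCompleterFromEnv } from './chat';

const chat = buildChatCompleterFromEnv();
if (!chat) {
  throw new Error('No chat provider configured: set CHAT_PROVIDER (and optionally CHAT_MODEL, CHAT_BASE_URL).');
}
```

One design note on the change itself: routing the fallback through `getEnvOptions()` keeps it on the same parsing path as the rest of the configuration, instead of scattering raw `process.env` reads through the module.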