
Commit d88b73a

feat(config): support custom LLM API paths
- add `llm-api-path` configuration option
- refactor constants for LLM hosts and API paths
- implement dynamic API path resolution in client
- update tests for provider detection and path handling

Signed-off-by: kovacs <mritd@linux.com>
1 parent 61739ea · commit d88b73a

5 files changed · 133 additions & 56 deletions

README.md

Lines changed: 18 additions & 6 deletions
````diff
@@ -111,6 +111,7 @@ All settings are configured via `~/.gitconfig` under the `[gitflow]` section.
 
 # LLM settings
 llm-api-host = https://openrouter.ai
+llm-api-path = /api/v1/chat/completions
 llm-model = mistralai/devstral-2512:free
 llm-temperature = 0.3
 llm-diff-context = 5
@@ -138,6 +139,7 @@ All settings are configured via `~/.gitconfig` under the `[gitflow]` section.
 |-----|-------------|---------|
 | `llm-api-key` | API key for cloud LLM providers | - |
 | `llm-api-host` | LLM API endpoint | see below |
+| `llm-api-path` | API path (auto-detected for known providers) | see below |
 | `llm-model` | LLM model name | see below |
 | `llm-temperature` | Model temperature | `0.3` |
 | `llm-diff-context` | Diff context lines | `5` |
@@ -165,12 +167,22 @@ Generate commit messages automatically using LLM:
 
 **Provider Selection:**
 
-| Provider | When | Default Host | Default Model |
-|----------|------|--------------|---------------|
-| OpenRouter | API key is set | `https://openrouter.ai` | `mistralai/devstral-2512:free` |
-| Groq | API key set + host contains `groq.com` | - | - |
-| OpenAI | API key set + host contains `openai.com` | - | - |
-| Ollama | No API key | `http://localhost:11434` | `qwen2.5-coder:7b` |
+| Provider | When | Default Host | Default Path | Default Model |
+|----------|------|--------------|--------------|---------------|
+| OpenRouter | API key is set | `https://openrouter.ai` | `/api/v1/chat/completions` | `mistralai/devstral-2512:free` |
+| Groq | Host contains `groq.com` | `https://api.groq.com` | `/openai/v1/chat/completions` | - |
+| OpenAI | Host contains `openai.com` | `https://api.openai.com` | `/v1/chat/completions` | - |
+| DeepSeek | Host contains `deepseek.com` | `https://api.deepseek.com` | `/v1/chat/completions` | - |
+| Mistral | Host contains `mistral.ai` | `https://api.mistral.ai` | `/v1/chat/completions` | - |
+| Ollama | No API key | `http://localhost:11434` | `/api/generate` | `qwen2.5-coder:7b` |
+| Other | Unknown host | - | `/v1/chat/completions` | - |
+
+**Custom API Path:**
+
+If your provider uses a non-standard path, set it explicitly:
+```bash
+git config --global gitflow.llm-api-path "/custom/v1/chat/completions"
+```
 
 **Quick Start with OpenRouter (recommended):**
 ```bash
````
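The precedence the README describes (explicit `llm-api-path` first, then a provider-detected default, then the OpenAI-compatible fallback) is easy to see in a small sketch; `resolvePath` and its arguments are illustrative names, not identifiers from this repository:

```go
package main

import "fmt"

// resolvePath mirrors the documented precedence. It is a hedged
// sketch of the behavior, not the repository's actual code.
func resolvePath(userPath, detectedPath string) string {
	if userPath != "" {
		return userPath // 1. user-defined llm-api-path wins
	}
	if detectedPath != "" {
		return detectedPath // 2. default detected from the host
	}
	return "/v1/chat/completions" // 3. OpenAI-compatible fallback
}

func main() {
	// Groq host with no explicit path: the detected default applies.
	fmt.Println("https://api.groq.com" + resolvePath("", "/openai/v1/chat/completions"))
	// Unknown host with an explicit override: the override applies.
	fmt.Println("https://example.com" + resolvePath("/custom/v1/chat/completions", ""))
}
```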

config/gitconfig.go

Lines changed: 1 addition & 0 deletions
```diff
@@ -15,6 +15,7 @@ const GitConfigSection = "gitflow"
 const (
 	GitConfigLLMAPIKey         = "llm-api-key"
 	GitConfigLLMAPIHost        = "llm-api-host"
+	GitConfigLLMAPIPath        = "llm-api-path"
 	GitConfigLLMModel          = "llm-model"
 	GitConfigLLMTemperature    = "llm-temperature"
 	GitConfigLLMRequestTimeout = "llm-request-timeout"
```
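Since git config keys are addressed as `<section>.<key>`, the new constant resolves to `gitflow.llm-api-path`, matching the README example above. A tiny sketch of that composition (constants mirrored from the diff; the harness is illustrative):

```go
package main

import "fmt"

const (
	gitConfigSection    = "gitflow"      // mirrors GitConfigSection
	gitConfigLLMAPIPath = "llm-api-path" // mirrors GitConfigLLMAPIPath
)

func main() {
	// The fully qualified key read from ~/.gitconfig.
	fmt.Println(gitConfigSection + "." + gitConfigLLMAPIPath) // gitflow.llm-api-path
}
```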

consts/consts.go

Lines changed: 20 additions & 4 deletions
```diff
@@ -55,8 +55,6 @@ const (
 
 // LLM defaults.
 const (
-	LLMDefaultOllamaHost     = "http://localhost:11434"
-	LLMDefaultOpenRouterHost = "https://openrouter.ai"
 	LLMDefaultDiffContext    = 5
 	LLMDefaultRequestTimeout = 2 * time.Minute
 	LLMDefaultRetries        = 0
@@ -65,6 +63,24 @@ const (
 	LLMDefaultConcurrency = 3
 )
 
+// LLM provider hosts (OpenAI-compatible APIs only).
+const (
+	LLMHostOllama     = "http://localhost:11434"
+	LLMHostOpenRouter = "https://openrouter.ai"
+	LLMHostGroq       = "https://api.groq.com"
+	LLMHostOpenAI     = "https://api.openai.com"
+	LLMHostDeepSeek   = "https://api.deepseek.com"
+	LLMHostMistral    = "https://api.mistral.ai"
+)
+
+// LLM API paths for chat completions.
+const (
+	LLMPathOllama     = "/api/generate"
+	LLMPathOpenAI     = "/v1/chat/completions"        // OpenAI, DeepSeek, Mistral, most compatible APIs
+	LLMPathOpenRouter = "/api/v1/chat/completions"    // OpenRouter
+	LLMPathGroq       = "/openai/v1/chat/completions" // Groq
+)
+
 // LLM language options.
 const (
 	LLMLangEN = "en"
@@ -74,8 +90,8 @@ const (
 
 // LLM default models.
 const (
-	LLMDefaultOllamaModel     = "qwen2.5-coder:7b"
-	LLMDefaultOpenRouterModel = "mistralai/devstral-2512:free"
+	LLMModelOllama     = "qwen2.5-coder:7b"
+	LLMModelOpenRouter = "mistralai/devstral-2512:free"
 )
 
 // LLM default prompts (can be overridden via gitconfig).
```
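Note that the host constants carry no trailing slash while every path constant starts with one, so plain string concatenation yields a well-formed endpoint. A quick sketch with values copied from the constants above (local names are illustrative):

```go
package main

import "fmt"

const (
	llmHostGroq       = "https://api.groq.com"
	llmPathGroq       = "/openai/v1/chat/completions"
	llmHostOpenRouter = "https://openrouter.ai"
	llmPathOpenRouter = "/api/v1/chat/completions"
)

func main() {
	// Host + path concatenation, as the client now builds its endpoint.
	fmt.Println(llmHostGroq + llmPathGroq)             // https://api.groq.com/openai/v1/chat/completions
	fmt.Println(llmHostOpenRouter + llmPathOpenRouter) // https://openrouter.ai/api/v1/chat/completions
}
```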

internal/llm/client.go

Lines changed: 48 additions & 29 deletions
```diff
@@ -39,6 +39,7 @@ const (
 type Client struct {
 	provider Provider
 	host     string
+	apiPath  string
 	apiKey   string
 	timeout  time.Duration
 	retries  int
@@ -62,36 +63,45 @@ type GenerateOptions struct {
 // Provider selection:
 // - If API key is set, uses OpenAI-compatible API (OpenRouter by default)
 // - Otherwise, uses local Ollama
+//
+// API path resolution:
+// 1. User-defined llm-api-path takes highest priority
+// 2. Auto-detect from host for known providers
+// 3. Fall back to OpenAI-compatible path (/v1/chat/completions)
 func NewClient() *Client {
 	// Get API key from gitconfig
 	apiKey := config.GetString(config.GitConfigLLMAPIKey, "")
 
 	// Determine provider and defaults based on API key presence
 	provider := ProviderOllama
-	defaultHost := consts.LLMDefaultOllamaHost
-	defaultModel := consts.LLMDefaultOllamaModel
+	defaultHost := consts.LLMHostOllama
+	defaultModel := consts.LLMModelOllama
+	defaultPath := consts.LLMPathOllama
 
 	if apiKey != "" {
 		provider = ProviderOpenRouter
-		defaultHost = consts.LLMDefaultOpenRouterHost
-		defaultModel = consts.LLMDefaultOpenRouterModel
+		defaultHost = consts.LLMHostOpenRouter
+		defaultModel = consts.LLMModelOpenRouter
+		defaultPath = consts.LLMPathOpenRouter
 	}
 
-	// Get host
+	// Get host and normalize
 	host := config.GetString(config.GitConfigLLMAPIHost, "")
 	if host == "" {
 		host = defaultHost
 	} else {
-		host = normalizeHost(host, defaultHost)
+		host = normalizeHost(host)
 	}
 
-	// Detect provider from host
+	// Detect provider from host and set appropriate API path
 	if apiKey != "" {
-		if strings.Contains(host, "groq.com") {
-			provider = ProviderGroq
-		} else if strings.Contains(host, "openai.com") {
-			provider = ProviderOpenAI
-		}
+		provider, defaultPath = detectProvider(host)
+	}
+
+	// Get user-defined API path (highest priority)
+	apiPath := config.GetString(config.GitConfigLLMAPIPath, "")
+	if apiPath == "" {
+		apiPath = defaultPath
 	}
 
 	// Get model
@@ -120,6 +130,7 @@ func NewClient() *Client {
 	return &Client{
 		provider: provider,
 		host:     host,
+		apiPath:  apiPath,
 		apiKey:   apiKey,
 		timeout:  timeout,
 		retries:  retries,
@@ -133,15 +144,33 @@ func NewClient() *Client {
 	}
 }
 
-// normalizeHost ensures the host has a proper scheme.
-func normalizeHost(host, defaultHost string) string {
-	if host == "" {
-		return defaultHost
+// detectProvider detects the LLM provider from host and returns the provider type and default API path.
+// For unknown hosts, returns the OpenAI provider with the OpenAI-compatible path.
+func detectProvider(host string) (Provider, string) {
+	switch {
+	case strings.Contains(host, "groq.com"):
+		return ProviderGroq, consts.LLMPathGroq
+	case strings.Contains(host, "openai.com"):
+		return ProviderOpenAI, consts.LLMPathOpenAI
+	case strings.Contains(host, "deepseek.com"):
+		return ProviderOpenAI, consts.LLMPathOpenAI // DeepSeek uses OpenAI-compatible API
+	case strings.Contains(host, "mistral.ai"):
+		return ProviderOpenAI, consts.LLMPathOpenAI // Mistral uses OpenAI-compatible API
+	case strings.Contains(host, "openrouter.ai"):
+		return ProviderOpenRouter, consts.LLMPathOpenRouter
+	default:
+		// Unknown provider, assume OpenAI-compatible API
+		return ProviderOpenAI, consts.LLMPathOpenAI
 	}
+}
+
+// normalizeHost ensures the host has a proper scheme and no trailing slash.
+func normalizeHost(host string) string {
+	host = strings.TrimSuffix(host, "/")
 	if strings.HasPrefix(host, "http://") || strings.HasPrefix(host, "https://") {
-		return strings.TrimSuffix(host, "/")
+		return host
 	}
-	return "https://" + strings.TrimSuffix(host, "/")
+	return "https://" + host
 }
 
 // Generate calls the LLM API to generate text.
@@ -296,17 +325,7 @@ func (c *Client) doGenerateOpenAI(ctx context.Context, model, prompt string, opt
 	ctx, cancel := context.WithTimeout(ctx, c.timeout)
 	defer cancel()
 
-	// Build endpoint URL based on provider
-	// Groq uses /openai/v1/..., OpenRouter/OpenAI use /api/v1/...
-	var endpoint string
-	switch c.provider {
-	case ProviderGroq:
-		endpoint = c.host + "/openai/v1/chat/completions"
-	default:
-		endpoint = c.host + "/api/v1/chat/completions"
-	}
-
-	req, err := http.NewRequestWithContext(ctx, http.MethodPost, endpoint, bytes.NewReader(body))
+	req, err := http.NewRequestWithContext(ctx, http.MethodPost, c.host+c.apiPath, bytes.NewReader(body))
 	if err != nil {
 		return "", fmt.Errorf("failed to create request: %w", err)
 	}
```
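The rewritten `normalizeHost` is a pure function, so its contract is easy to demonstrate in isolation. In this self-contained sketch the function body is copied from the diff; the surrounding harness is illustrative:

```go
package main

import (
	"fmt"
	"strings"
)

// normalizeHost ensures the host has a proper scheme and no trailing slash.
func normalizeHost(host string) string {
	host = strings.TrimSuffix(host, "/")
	if strings.HasPrefix(host, "http://") || strings.HasPrefix(host, "https://") {
		return host
	}
	return "https://" + host
}

func main() {
	fmt.Println(normalizeHost("localhost:11434"))         // https://localhost:11434
	fmt.Println(normalizeHost("http://localhost:11434/")) // http://localhost:11434
	fmt.Println(normalizeHost("https://api.groq.com"))    // https://api.groq.com
}
```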

internal/llm/client_test.go

Lines changed: 46 additions & 17 deletions
```diff
@@ -14,25 +14,50 @@ import (
 
 func TestNormalizeHost(t *testing.T) {
 	tests := []struct {
-		name        string
-		input       string
-		defaultHost string
-		want        string
+		name  string
+		input string
+		want  string
 	}{
-		{"empty with default", "", "http://default:1234", "http://default:1234"},
-		{"empty no default", "", "", ""},
-		{"host only", "localhost:11434", "", "https://localhost:11434"},
-		{"with http", "http://localhost:11434", "", "http://localhost:11434"},
-		{"with https", "https://ollama.example.com", "", "https://ollama.example.com"},
-		{"with trailing slash", "http://localhost:11434/", "", "http://localhost:11434"},
-		{"ip address", "192.168.1.100:11434", "", "https://192.168.1.100:11434"},
+		{"host only", "localhost:11434", "https://localhost:11434"},
+		{"with http", "http://localhost:11434", "http://localhost:11434"},
+		{"with https", "https://ollama.example.com", "https://ollama.example.com"},
+		{"with trailing slash", "http://localhost:11434/", "http://localhost:11434"},
+		{"ip address", "192.168.1.100:11434", "https://192.168.1.100:11434"},
 	}
 
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
-			got := normalizeHost(tt.input, tt.defaultHost)
+			got := normalizeHost(tt.input)
 			if got != tt.want {
-				t.Errorf("normalizeHost(%q, %q) = %q, want %q", tt.input, tt.defaultHost, got, tt.want)
+				t.Errorf("normalizeHost(%q) = %q, want %q", tt.input, got, tt.want)
+			}
+		})
+	}
+}
+
+func TestDetectProvider(t *testing.T) {
+	tests := []struct {
+		name         string
+		host         string
+		wantProvider Provider
+		wantPath     string
+	}{
+		{"groq", "https://api.groq.com", ProviderGroq, consts.LLMPathGroq},
+		{"openai", "https://api.openai.com", ProviderOpenAI, consts.LLMPathOpenAI},
+		{"deepseek", "https://api.deepseek.com", ProviderOpenAI, consts.LLMPathOpenAI},
+		{"mistral", "https://api.mistral.ai", ProviderOpenAI, consts.LLMPathOpenAI},
+		{"openrouter", "https://openrouter.ai", ProviderOpenRouter, consts.LLMPathOpenRouter},
+		{"unknown", "https://custom-llm.example.com", ProviderOpenAI, consts.LLMPathOpenAI},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			provider, path := detectProvider(tt.host)
+			if provider != tt.wantProvider {
+				t.Errorf("detectProvider(%q) provider = %q, want %q", tt.host, provider, tt.wantProvider)
+			}
+			if path != tt.wantPath {
+				t.Errorf("detectProvider(%q) path = %q, want %q", tt.host, path, tt.wantPath)
 			}
 		})
 	}
@@ -52,11 +77,11 @@ func TestNewClient_Defaults(t *testing.T) {
 	if c.provider != ProviderOllama {
 		t.Errorf("provider = %q, want %q", c.provider, ProviderOllama)
 	}
-	if c.host != consts.LLMDefaultOllamaHost {
-		t.Errorf("host = %q, want %q", c.host, consts.LLMDefaultOllamaHost)
+	if c.host != consts.LLMHostOllama {
+		t.Errorf("host = %q, want %q", c.host, consts.LLMHostOllama)
 	}
-	if c.model != consts.LLMDefaultOllamaModel {
-		t.Errorf("model = %q, want %q", c.model, consts.LLMDefaultOllamaModel)
+	if c.model != consts.LLMModelOllama {
+		t.Errorf("model = %q, want %q", c.model, consts.LLMModelOllama)
 	}
 	if c.timeout != consts.LLMDefaultRequestTimeout {
 		t.Errorf("timeout = %v, want %v", c.timeout, consts.LLMDefaultRequestTimeout)
@@ -91,6 +116,7 @@ func TestGenerate_Ollama(t *testing.T) {
 	c := &Client{
 		provider: ProviderOllama,
 		host:     server.URL,
+		apiPath:  consts.LLMPathOllama,
 		timeout:  10 * time.Second,
 		retries:  0,
 	}
@@ -120,6 +146,7 @@ func TestGenerate_Ollama(t *testing.T) {
 	c := &Client{
 		provider: ProviderOllama,
 		host:     server.URL,
+		apiPath:  consts.LLMPathOllama,
 		timeout:  10 * time.Second,
 		retries:  2,
 	}
@@ -174,6 +201,7 @@ func TestGenerate_OpenAI(t *testing.T) {
 	c := &Client{
 		provider: ProviderGroq,
 		host:     server.URL,
+		apiPath:  consts.LLMPathGroq,
 		apiKey:   "test-key",
 		timeout:  10 * time.Second,
 		retries:  0,
@@ -201,6 +229,7 @@ func TestGenerate_OpenAI(t *testing.T) {
 	c := &Client{
 		provider: ProviderGroq,
 		host:     server.URL,
+		apiPath:  consts.LLMPathGroq,
 		apiKey:   "test-key",
 		timeout:  10 * time.Second,
 		retries:  0,
```
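Because `apiPath` is now an explicit `Client` field, a test server can assert the exact path a request arrives on, which is what keeps the fixtures above independent of provider-specific URL building. A standalone sketch of that `httptest` pattern (not part of the actual test file; the handler and harness are illustrative):

```go
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

func main() {
	const wantPath = "/openai/v1/chat/completions" // same value as consts.LLMPathGroq

	// Stub server that checks which path it was called on,
	// the same pattern the client tests use via httptest.
	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.URL.Path != wantPath {
			fmt.Printf("path = %q, want %q\n", r.URL.Path, wantPath)
		}
		w.WriteHeader(http.StatusOK)
	}))
	defer server.Close()

	// The client would POST to host+apiPath; simulated here directly.
	resp, err := http.Post(server.URL+wantPath, "application/json", nil)
	if err == nil {
		resp.Body.Close()
		fmt.Println("status:", resp.StatusCode) // status: 200
	}
}
```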
