Skip to content

Commit ff14e72

Browse files
feat(ai-gateway): accept ~prefixed model ids in provider checks (#2841)
* feat(ai-gateway): accept ~-prefixed model ids in provider checks. Model ids that start with '~anthropic/', '~openai/', '~google/', or '~moonshotai/' are now treated the same as their untilded counterparts everywhere the gateway keys behaviour off a provider prefix.
* docs: clarify ~ routing-variant semantics in model-prefix

Co-authored-by: kiloconnect[bot] <240665456+kiloconnect[bot]@users.noreply.github.com>
1 parent 21ec9db commit ff14e72

8 files changed

Lines changed: 95 additions & 12 deletions

File tree

apps/web/src/lib/ai-gateway/providers/anthropic.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
1+
import { modelStartsWith } from '@/lib/ai-gateway/providers/model-prefix';
12
import { addCacheBreakpoints } from '@/lib/ai-gateway/providers/openrouter/request-helpers';
23
import type { GatewayRequest } from '@/lib/ai-gateway/providers/openrouter/types';
34
import { normalizeToolCallIds } from '@/lib/ai-gateway/tool-calling';
45

56
export function isAnthropicModel(requestedModel: string) {
6-
return requestedModel.startsWith('anthropic/');
7+
return modelStartsWith(requestedModel, 'anthropic/');
78
}
89

910
export function isHaikuModel(requestedModel: string) {
10-
return requestedModel.startsWith('anthropic/claude-haiku');
11+
return modelStartsWith(requestedModel, 'anthropic/claude-haiku');
1112
}
1213

1314
function appendAnthropicBetaHeader(extraHeaders: Record<string, string>, betaFlag: string) {

apps/web/src/lib/ai-gateway/providers/google.ts

Lines changed: 4 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,13 +1,14 @@
11
import type { KiloExclusiveModel } from '@/lib/ai-gateway/providers/kilo-exclusive-model';
2+
import { modelStartsWith } from '@/lib/ai-gateway/providers/model-prefix';
23
import type { GatewayRequest } from '@/lib/ai-gateway/providers/openrouter/types';
34
import type { ProviderId } from '@/lib/ai-gateway/providers/types';
45

56
export function isGeminiModel(model: string) {
6-
return model.startsWith('google/gemini');
7+
return modelStartsWith(model, 'google/gemini');
78
}
89

910
export function isGemmaModel(model: string) {
10-
return model.startsWith('google/gemma');
11+
return modelStartsWith(model, 'google/gemma');
1112
}
1213

1314
export const GEMMA_4_31B_IT_ID = 'google/gemma-4-31b-it';
@@ -29,7 +30,7 @@ export const gemma_4_26b_a4b_it_free_model: KiloExclusiveModel = {
2930
};
3031

3132
export function isGemini3Model(model: string) {
32-
return model.startsWith('google/gemini-3');
33+
return modelStartsWith(model, 'google/gemini-3');
3334
}
3435

3536
type ReadFileParametersSchema = {
Lines changed: 62 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,62 @@
1+
import { modelStartsWith, stripModelTilde } from './model-prefix';
2+
import { isAnthropicModel, isHaikuModel } from './anthropic';
3+
import { isOpenAiModel, isOpenAiOssModel } from './openai';
4+
import { isGeminiModel, isGemmaModel, isGemini3Model } from './google';
5+
import { isMoonshotModel } from './moonshotai';
6+
import { inferVercelFirstPartyInferenceProviderForModel } from './openrouter/inference-provider-id';
7+
8+
describe('modelStartsWith', () => {
9+
test('matches the bare prefix', () => {
10+
expect(modelStartsWith('anthropic/claude-sonnet-4.5', 'anthropic/')).toBe(true);
11+
});
12+
13+
test('matches the tilde-prefixed variant', () => {
14+
expect(modelStartsWith('~anthropic/claude-sonnet-4.5', 'anthropic/')).toBe(true);
15+
});
16+
17+
test('rejects unrelated prefixes', () => {
18+
expect(modelStartsWith('openai/gpt-5', 'anthropic/')).toBe(false);
19+
expect(modelStartsWith('~openai/gpt-5', 'anthropic/')).toBe(false);
20+
});
21+
});
22+
23+
describe('stripModelTilde', () => {
24+
test('removes a leading tilde', () => {
25+
expect(stripModelTilde('~anthropic/claude-sonnet-4.5')).toBe('anthropic/claude-sonnet-4.5');
26+
});
27+
28+
test('leaves untilded ids alone', () => {
29+
expect(stripModelTilde('anthropic/claude-sonnet-4.5')).toBe('anthropic/claude-sonnet-4.5');
30+
});
31+
});
32+
33+
describe('provider predicates accept tilde-prefixed model ids', () => {
34+
test('isAnthropicModel / isHaikuModel', () => {
35+
expect(isAnthropicModel('~anthropic/claude-sonnet-4.5')).toBe(true);
36+
expect(isHaikuModel('~anthropic/claude-haiku-4.5')).toBe(true);
37+
});
38+
39+
test('isOpenAiModel / isOpenAiOssModel', () => {
40+
expect(isOpenAiModel('~openai/gpt-5-nano')).toBe(true);
41+
expect(isOpenAiModel('~openai/gpt-oss')).toBe(false);
42+
expect(isOpenAiOssModel('~openai/gpt-oss')).toBe(true);
43+
});
44+
45+
test('google helpers', () => {
46+
expect(isGeminiModel('~google/gemini-2.5-flash-lite')).toBe(true);
47+
expect(isGemmaModel('~google/gemma-4-31b-it')).toBe(true);
48+
expect(isGemini3Model('~google/gemini-3-pro')).toBe(true);
49+
});
50+
51+
test('isMoonshotModel', () => {
52+
expect(isMoonshotModel('~moonshotai/kimi-k2.6')).toBe(true);
53+
});
54+
55+
test('inferVercelFirstPartyInferenceProviderForModel strips tilde', () => {
56+
expect(inferVercelFirstPartyInferenceProviderForModel('~anthropic/claude-sonnet-4.5')).toBe(
57+
'anthropic'
58+
);
59+
expect(inferVercelFirstPartyInferenceProviderForModel('~openai/gpt-5-nano')).toBe('openai');
60+
expect(inferVercelFirstPartyInferenceProviderForModel('~openai/gpt-oss')).toBe(null);
61+
});
62+
});
apps/web/src/lib/ai-gateway/providers/model-prefix.ts

Lines changed: 12 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,12 @@
1+
// Model ids may be prefixed with a leading '~' to indicate a routing variant
2+
// whose concrete target can differ from the untilded id — for example,
3+
// '~anthropic/claude-haiku-latest' may route to 'anthropic/claude-haiku-4.5'.
4+
// Any code that keys off a provider prefix such as 'anthropic/' or 'openai/'
5+
// should still treat both forms equivalently.
6+
export function modelStartsWith(model: string, prefix: string) {
7+
return model.startsWith(prefix) || model.startsWith(`~${prefix}`);
8+
}
9+
10+
export function stripModelTilde(model: string) {
11+
return model.startsWith('~') ? model.slice(1) : model;
12+
}

apps/web/src/lib/ai-gateway/providers/model-settings.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
import { seed_20_pro_free_model } from '@/lib/ai-gateway/providers/bytedance';
22
import { isGemini3Model, isGeminiModel, isGemmaModel } from '@/lib/ai-gateway/providers/google';
33
import { isMinimaxModel } from '@/lib/ai-gateway/providers/minimax';
4+
import { modelStartsWith } from '@/lib/ai-gateway/providers/model-prefix';
45
import { isMoonshotModel } from '@/lib/ai-gateway/providers/moonshotai';
56
import { isOpenAiModel } from '@/lib/ai-gateway/providers/openai';
67
import { qwen36_plus_model } from '@/lib/ai-gateway/providers/qwen';
@@ -56,7 +57,7 @@ export const REASONING_VARIANTS_MINIMAL_LOW_MEDIUM_HIGH = {
5657
} as const;
5758

5859
export function getModelVariants(model: string): OpenCodeSettings['variants'] {
59-
if (model.startsWith('anthropic/claude-opus-4.7')) {
60+
if (modelStartsWith(model, 'anthropic/claude-opus-4.7')) {
6061
return {
6162
none: { reasoning: { enabled: false, effort: 'none' } },
6263
low: { reasoning: { enabled: true, effort: 'low' }, verbosity: 'low' },
@@ -66,7 +67,7 @@ export function getModelVariants(model: string): OpenCodeSettings['variants'] {
6667
max: { reasoning: { enabled: true, effort: 'xhigh' }, verbosity: 'max' },
6768
};
6869
}
69-
if (model.startsWith('anthropic/')) {
70+
if (modelStartsWith(model, 'anthropic/')) {
7071
return {
7172
none: { reasoning: { enabled: false, effort: 'none' } },
7273
low: { reasoning: { enabled: true, effort: 'low' }, verbosity: 'low' },

apps/web/src/lib/ai-gateway/providers/moonshotai.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,8 @@
1+
import { modelStartsWith } from '@/lib/ai-gateway/providers/model-prefix';
12
import type { GatewayRequest } from '@/lib/ai-gateway/providers/openrouter/types';
23

34
export function isMoonshotModel(model: string) {
4-
return model.startsWith('moonshotai/');
5+
return modelStartsWith(model, 'moonshotai/');
56
}
67

78
export function applyMoonshotModelSettings(requestToMutate: GatewayRequest) {

apps/web/src/lib/ai-gateway/providers/openai.ts

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,13 @@
1+
import { modelStartsWith } from '@/lib/ai-gateway/providers/model-prefix';
2+
13
export function isOpenAiModel(requestedModel: string) {
2-
return requestedModel.startsWith('openai/') && !requestedModel.startsWith('openai/gpt-oss');
4+
return (
5+
modelStartsWith(requestedModel, 'openai/') && !modelStartsWith(requestedModel, 'openai/gpt-oss')
6+
);
37
}
48

59
export function isOpenAiOssModel(requestedModel: string) {
6-
return requestedModel.startsWith('openai/gpt-oss');
10+
return modelStartsWith(requestedModel, 'openai/gpt-oss');
711
}
812

913
export const GPT_5_NANO_ID = 'openai/gpt-5-nano';

apps/web/src/lib/ai-gateway/providers/openrouter/inference-provider-id.ts

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
import * as z from 'zod';
2+
import { modelStartsWith, stripModelTilde } from '@/lib/ai-gateway/providers/model-prefix';
23

34
export const OpenRouterInferenceProviderIdSchema = z.enum([
45
'alibaba',
@@ -127,9 +128,9 @@ const modelPrefixToVercelInferenceProviderMapping = {
127128
export function inferVercelFirstPartyInferenceProviderForModel(
128129
model: string
129130
): VercelInferenceProviderId | null {
130-
return model.startsWith('openai/gpt-oss')
131+
return modelStartsWith(model, 'openai/gpt-oss')
131132
? null
132-
: (modelPrefixToVercelInferenceProviderMapping[model.split('/')[0]] ?? null);
133+
: (modelPrefixToVercelInferenceProviderMapping[stripModelTilde(model).split('/')[0]] ?? null);
133134
}
134135

135136
export const AwsCredentialsSchema = z.object({

0 commit comments

Comments (0)