Skip to content

Commit 179a553

Browse files
authored
Remove unused token estimation code (#945)
2 parents 0ccb095 + d5d36b7 commit 179a553

8 files changed

Lines changed: 1 addition & 117 deletions

File tree

src/app/api/dev/consume-credits/route.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -79,8 +79,6 @@ export async function POST(request: NextRequest): Promise<NextResponse> {
7979
},
8080
max_tokens: null,
8181
has_middle_out_transform: null,
82-
estimatedInputTokens: 0,
83-
estimatedOutputTokens: 0,
8482
isStreaming: false,
8583
prior_microdollar_usage: user.microdollars_used,
8684
posthog_distinct_id: user.google_user_email,

src/app/api/fim/completions/route.ts

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,6 @@ import { getUserFromAuth } from '@/lib/user.server';
1111
import {
1212
checkOrganizationModelRestrictions,
1313
countAndStoreFimUsage,
14-
estimateFimTokens,
1514
extractFimPromptInfo,
1615
extractFraudAndProjectHeaders,
1716
invalidRequestResponse,
@@ -122,7 +121,6 @@ export async function POST(request: NextRequest) {
122121
const taskId = extractHeaderAndLimitLength(request, 'x-kilocode-taskid') ?? undefined;
123122

124123
// Extract properties for usage context
125-
const tokenEstimates = estimateFimTokens(requestBody);
126124
const promptInfo = extractFimPromptInfo(requestBody);
127125

128126
const userByok = organizationId
@@ -136,8 +134,6 @@ export async function POST(request: NextRequest) {
136134
promptInfo,
137135
max_tokens: requestBody.max_tokens ?? null,
138136
has_middle_out_transform: null, // N/A for FIM
139-
estimatedInputTokens: tokenEstimates.estimatedInputTokens,
140-
estimatedOutputTokens: tokenEstimates.estimatedOutputTokens,
141137
fraudHeaders,
142138
isStreaming: requestBody.stream === true,
143139
organizationId,

src/app/api/openrouter/[...path]/route.ts

Lines changed: 0 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ import {
2222
captureProxyError,
2323
checkOrganizationModelRestrictions,
2424
dataCollectionRequiredResponse,
25-
estimateChatTokens_ignoringToolDefinitions,
2625
extractFraudAndProjectHeaders,
2726
invalidPathResponse,
2827
invalidRequestResponse,
@@ -268,7 +267,6 @@ export async function POST(request: NextRequest): Promise<NextResponseType<unkno
268267
}
269268

270269
// Extract properties for usage context
271-
const tokenEstimates = estimateChatTokens_ignoringToolDefinitions(requestBodyParsed);
272270
const promptInfo = extractPromptInfo(requestBodyParsed);
273271

274272
const usageContext: MicrodollarUsageContext = {
@@ -278,8 +276,6 @@ export async function POST(request: NextRequest): Promise<NextResponseType<unkno
278276
promptInfo,
279277
max_tokens: requestBodyParsed.max_tokens ?? null,
280278
has_middle_out_transform: requestBodyParsed.transforms?.includes('middle-out') ?? false,
281-
estimatedInputTokens: tokenEstimates.estimatedInputTokens,
282-
estimatedOutputTokens: tokenEstimates.estimatedOutputTokens,
283279
fraudHeaders,
284280
isStreaming: requestBodyParsed.stream === true,
285281
organizationId,

src/lib/llm-proxy-helpers.test.ts

Lines changed: 1 addition & 60 deletions
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,5 @@
11
import { describe, it, expect } from '@jest/globals';
2-
import {
3-
checkOrganizationModelRestrictions,
4-
estimateChatTokens_ignoringToolDefinitions,
5-
} from './llm-proxy-helpers';
6-
import type { OpenRouterChatCompletionRequest } from './providers/openrouter/types';
2+
import { checkOrganizationModelRestrictions } from './llm-proxy-helpers';
73

84
describe('checkOrganizationModelRestrictions', () => {
95
describe('enterprise plan - model deny list restrictions', () => {
@@ -209,58 +205,3 @@ describe('checkOrganizationModelRestrictions', () => {
209205
});
210206
});
211207
});
212-
213-
describe('estimateChatTokens', () => {
214-
it('should estimate tokens from valid messages', () => {
215-
const body = {
216-
model: 'anthropic/claude-3-opus',
217-
messages: [
218-
{ role: 'user', content: 'Hello, how are you?' },
219-
{ role: 'assistant', content: 'I am doing well, thank you!' },
220-
],
221-
} as OpenRouterChatCompletionRequest;
222-
223-
const result = estimateChatTokens_ignoringToolDefinitions(body);
224-
225-
expect(result.estimatedInputTokens).toBeGreaterThan(0);
226-
expect(result.estimatedOutputTokens).toBeGreaterThan(0);
227-
});
228-
229-
it('should handle missing messages gracefully (regression test for KILOCODE-WEB-5ND)', () => {
230-
// This test ensures we don't crash when messages is undefined/null/invalid
231-
// which can happen with malformed API requests from abuse attempts
232-
const undefinedMessages = { model: 'test' } as OpenRouterChatCompletionRequest;
233-
const nullMessages = {
234-
model: 'test',
235-
messages: null,
236-
} as unknown as OpenRouterChatCompletionRequest;
237-
238-
expect(estimateChatTokens_ignoringToolDefinitions(undefinedMessages)).toEqual({
239-
estimatedInputTokens: 0,
240-
estimatedOutputTokens: 0,
241-
});
242-
expect(estimateChatTokens_ignoringToolDefinitions(nullMessages)).toEqual({
243-
estimatedInputTokens: 0,
244-
estimatedOutputTokens: 0,
245-
});
246-
});
247-
248-
it('should handle content parts with undefined text', () => {
249-
const body = {
250-
model: 'test',
251-
messages: [
252-
{
253-
role: 'user',
254-
content: [
255-
{ type: 'text', text: undefined },
256-
{ type: 'text', text: 'hello' },
257-
],
258-
},
259-
],
260-
} as unknown as OpenRouterChatCompletionRequest;
261-
262-
const result = estimateChatTokens_ignoringToolDefinitions(body);
263-
expect(result.estimatedInputTokens).toBeGreaterThan(0);
264-
expect(result.estimatedOutputTokens).toBeGreaterThan(0);
265-
});
266-
});

src/lib/llm-proxy-helpers.ts

Lines changed: 0 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -353,47 +353,6 @@ export function extractFimPromptInfo(body: { prompt: string; suffix?: string | n
353353
};
354354
}
355355

356-
export function estimateChatTokens_ignoringToolDefinitions(body: OpenRouterChatCompletionRequest): {
357-
estimatedInputTokens: number;
358-
estimatedOutputTokens: number;
359-
} {
360-
if (!body.messages || !Array.isArray(body.messages)) {
361-
return { estimatedInputTokens: 0, estimatedOutputTokens: 0 };
362-
}
363-
const overallLength = body.messages.reduce(
364-
(sum, m) =>
365-
sum +
366-
(typeof m.content === 'string'
367-
? m.content?.length
368-
: Array.isArray(m.content)
369-
? m.content
370-
.filter(c => c.type === 'text')
371-
.map(c => (c.text ?? '').length)
372-
.reduce((l, str) => str + 1 + l, 0)
373-
: 0),
374-
0
375-
);
376-
return {
377-
estimatedInputTokens: overallLength / 4,
378-
estimatedOutputTokens: overallLength / 4, // Conservative estimate
379-
};
380-
}
381-
382-
export function estimateFimTokens(body: {
383-
prompt: string;
384-
suffix?: string | null;
385-
max_tokens?: number | null;
386-
}): {
387-
estimatedInputTokens: number;
388-
estimatedOutputTokens: number;
389-
} {
390-
const promptLength = body.prompt.length + (body.suffix?.length || 0);
391-
return {
392-
estimatedInputTokens: promptLength / 4,
393-
estimatedOutputTokens: (body.max_tokens || 1024) / 2,
394-
};
395-
}
396-
397356
// ============================================================================
398357
// FIM-Specific Code
399358
// ============================================================================

src/lib/processUsage.test.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -329,8 +329,6 @@ describe('logMicrodollarUsage', () => {
329329
},
330330
max_tokens: 200,
331331
has_middle_out_transform: true,
332-
estimatedInputTokens: 100,
333-
estimatedOutputTokens: 100,
334332
status_code: 200,
335333
editor_name: null,
336334
machine_id: null,

src/lib/processUsage.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -164,8 +164,6 @@ export type MicrodollarUsageContext = {
164164
promptInfo: PromptInfo;
165165
max_tokens: number | null;
166166
has_middle_out_transform: boolean | null;
167-
estimatedInputTokens: number;
168-
estimatedOutputTokens: number;
169167
isStreaming: boolean;
170168
prior_microdollar_usage: number;
171169
/** User email for authenticated users - used as PostHog distinctId. Undefined for anonymous users. */

src/tests/helpers/microdollar-usage.helper.ts

Lines changed: 0 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -100,8 +100,6 @@ export function createMockUsageContext(
100100
},
101101
max_tokens: null,
102102
has_middle_out_transform: null,
103-
estimatedInputTokens: 100,
104-
estimatedOutputTokens: 50,
105103
isStreaming: false,
106104
prior_microdollar_usage,
107105
posthog_distinct_id,

0 commit comments

Comments (0)