Skip to content

Commit b39d743

Browse files
author
echoVic
committed
refactor: 移除 blade-claude 相关代码及依赖
移除不再使用的 blade-claude 服务相关代码,包括:

- 删除 BladeClaudeChatService 实现
- 从 ProviderType 中移除 blade-claude 类型
- 从 ModelConfigWizard 中移除相关配置
- 移除 blade-auth-service 依赖包
- 清理 pnpm-lock.yaml 中相关依赖

同时调整 maxOutputTokens 处理逻辑,改为仅在显式配置时传递
1 parent fe66084 commit b39d743

12 files changed

Lines changed: 44 additions & 206 deletions

package.json

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -116,7 +116,6 @@
116116
"ansi-escapes": "^7.2.0",
117117
"async-mutex": "^0.5.0",
118118
"axios": "^1.12.2",
119-
"blade-auth-service": "^1.0.1",
120119
"chalk": "^5.4.1",
121120
"diff": "^8.0.2",
122121
"fast-glob": "^3.3.3",

pnpm-lock.yaml

Lines changed: 23 additions & 12 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

src/agent/Agent.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1877,7 +1877,8 @@ IMPORTANT: Execute according to the approved plan above. Follow the steps exactl
18771877
const modelName = chatConfig.model;
18781878
const maxContextTokens =
18791879
chatConfig.maxContextTokens ?? this.config.maxContextTokens;
1880-
const maxOutputTokens = chatConfig.maxOutputTokens ?? this.config.maxOutputTokens;
1880+
// 用于计算压缩阈值的 maxOutputTokens,如果未配置则使用保守的默认值 8192
1881+
const maxOutputTokens = chatConfig.maxOutputTokens ?? this.config.maxOutputTokens ?? 8192;
18811882

18821883
// 计算可用于输入的空间:上下文窗口 - 预留给输出的空间
18831884
const availableForInput = maxContextTokens - maxOutputTokens;

src/config/defaults.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ export const DEFAULT_CONFIG: BladeConfig = {
1616
// 全局默认参数
1717
temperature: 0.0,
1818
maxContextTokens: 128000, // 128K - 主流大模型的标准上下文窗口
19-
maxOutputTokens: 32768, // 32K - 主流大模型的输出 token 限制
19+
maxOutputTokens: undefined, // 不设置默认值,让各 API 使用自己的默认限制
2020
stream: true,
2121
topP: 0.9,
2222
topK: 50,

src/config/types.ts

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,16 +13,14 @@
1313
* - azure-openai: Azure OpenAI Service
1414
* - antigravity: Google Antigravity(OAuth 认证,统一网关访问 Claude/Gemini/GPT-OSS)
1515
* - copilot: GitHub Copilot(OAuth 认证,访问 GPT-4o/Claude/Gemini 等模型)
16-
* - blade-claude: Blade 内置 Claude(通过私有包访问)
1716
*/
1817
export type ProviderType =
1918
| 'openai-compatible'
2019
| 'anthropic'
2120
| 'gemini'
2221
| 'azure-openai'
2322
| 'antigravity'
24-
| 'copilot'
25-
| 'blade-claude';
23+
| 'copilot';
2624

2725
/**
2826
* 权限模式枚举
@@ -111,7 +109,7 @@ export interface BladeConfig {
111109
// 全局默认参数
112110
temperature: number;
113111
maxContextTokens: number; // 上下文窗口大小(用于压缩判断)
114-
maxOutputTokens: number; // 输出 token 限制(传给 API 的 max_tokens)
112+
maxOutputTokens?: number; // 输出 token 限制(传给 API 的 max_tokens),undefined 表示让 API 使用默认值
115113
stream: boolean;
116114
topP: number;
117115
topK: number;

src/services/AntigravityChatService.ts

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -668,7 +668,8 @@ export class AntigravityChatService implements IChatService {
668668
contents,
669669
systemInstruction,
670670
generationConfig: {
671-
maxOutputTokens: this.config.maxOutputTokens ?? 8192,
671+
// 只有显式配置了 maxOutputTokens 才传,否则让 API 使用默认值
672+
...(this.config.maxOutputTokens && { maxOutputTokens: this.config.maxOutputTokens }),
672673
temperature: this.config.temperature ?? 0.7,
673674
},
674675
tools: antigravityTools,
@@ -781,7 +782,8 @@ export class AntigravityChatService implements IChatService {
781782
contents,
782783
systemInstruction,
783784
generationConfig: {
784-
maxOutputTokens: this.config.maxOutputTokens ?? 8192,
785+
// 只有显式配置了 maxOutputTokens 才传,否则让 API 使用默认值
786+
...(this.config.maxOutputTokens && { maxOutputTokens: this.config.maxOutputTokens }),
785787
temperature: this.config.temperature ?? 0.7,
786788
},
787789
tools: antigravityTools,

src/services/AzureOpenAIChatService.ts

Lines changed: 4 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -208,7 +208,8 @@ export class AzureOpenAIChatService implements IChatService {
208208
tools: openaiTools,
209209
tool_choice:
210210
openaiTools && openaiTools.length > 0 ? ('auto' as const) : undefined,
211-
max_tokens: this.config.maxOutputTokens ?? 32768,
211+
// 只有显式配置了 maxOutputTokens 才传 max_tokens,否则让 API 使用默认值
212+
...(this.config.maxOutputTokens && { max_tokens: this.config.maxOutputTokens }),
212213
temperature: this.config.temperature ?? 0.0,
213214
};
214215

@@ -309,7 +310,8 @@ export class AzureOpenAIChatService implements IChatService {
309310
tools: openaiTools,
310311
tool_choice:
311312
openaiTools && openaiTools.length > 0 ? ('auto' as const) : undefined,
312-
max_tokens: this.config.maxOutputTokens ?? 32768,
313+
// 只有显式配置了 maxOutputTokens 才传 max_tokens,否则让 API 使用默认值
314+
...(this.config.maxOutputTokens && { max_tokens: this.config.maxOutputTokens }),
313315
temperature: this.config.temperature ?? 0.0,
314316
stream: true as const,
315317
stream_options: { include_usage: true },

0 commit comments

Comments (0)