diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml
index dc8270feb..76a26341c 100644
--- a/pnpm-lock.yaml
+++ b/pnpm-lock.yaml
@@ -3987,6 +3987,9 @@ packages:
tinybench@2.9.0:
resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==}
+ tinyexec@1.0.1:
+ resolution: {integrity: sha512-5uC6DDlmeqiOwCPmK9jMSdOuZTh8bU39Ys6yidB+UTt5hfZUPGAypSgFRiEp+jbi9qH40BLDvy85jIU88wKSqw==}
+
tinyexec@1.0.2:
resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==}
engines: {node: '>=18'}
@@ -4493,7 +4496,7 @@ snapshots:
'@antfu/install-pkg@1.1.0':
dependencies:
package-manager-detector: 1.3.0
- tinyexec: 1.0.2
+ tinyexec: 1.0.1
'@antfu/utils@9.3.0': {}
@@ -4541,7 +4544,7 @@ snapshots:
'@babel/parser': 7.28.5
'@babel/types': 7.28.5
'@jridgewell/gen-mapping': 0.3.12
- '@jridgewell/trace-mapping': 0.3.31
+ '@jridgewell/trace-mapping': 0.3.29
jsesc: 3.1.0
'@babel/helper-string-parser@7.27.1': {}
@@ -4853,7 +4856,7 @@ snapshots:
'@jridgewell/gen-mapping@0.3.12':
dependencies:
'@jridgewell/sourcemap-codec': 1.5.5
- '@jridgewell/trace-mapping': 0.3.31
+ '@jridgewell/trace-mapping': 0.3.29
'@jridgewell/remapping@2.3.5':
dependencies:
@@ -8897,6 +8900,8 @@ snapshots:
tinybench@2.9.0: {}
+ tinyexec@1.0.1: {}
+
tinyexec@1.0.2: {}
tinyglobby@0.2.15:
diff --git a/src/app/service/agent/core/providers/openai.test.ts b/src/app/service/agent/core/providers/openai.test.ts
index 9c853c772..a62b9fdf6 100644
--- a/src/app/service/agent/core/providers/openai.test.ts
+++ b/src/app/service/agent/core/providers/openai.test.ts
@@ -426,6 +426,96 @@ describe("parseOpenAIStream", () => {
}
});
+ it("应解析单个 chunk 内的 <think>...</think> 标签", async () => {
+ const reader = createMockReader([
+ 'data: {"choices":[{"delta":{"content":"before<think>reasoning</think>after"}}]}\n\n',
+ "data: [DONE]\n\n",
+ ]);
+
+ const events: ChatStreamEvent[] = [];
+ const controller = new AbortController();
+
+ await parseOpenAIStream(reader, (e) => events.push(e), controller.signal);
+
+ expect(events).toEqual([
+ { type: "content_delta", delta: "before" },
+ { type: "thinking_delta", delta: "reasoning" },
+ { type: "content_delta", delta: "after" },
+ { type: "done" },
+ ]);
+ });
+
+ it("应处理 <think> 标签被 SSE chunk 拆开的情况", async () => {
+ // 开始标签跨 chunk:chunk1 以 "<th" 结尾,chunk2 以 "ink>" 开头
+ const reader = createMockReader([
+ 'data: {"choices":[{"delta":{"content":"before<th"}}]}\n\n',
+ 'data: {"choices":[{"delta":{"content":"ink>thought</think>after"}}]}\n\n',
+ "data: [DONE]\n\n",
+ ]);
+
+ const events: ChatStreamEvent[] = [];
+ const controller = new AbortController();
+
+ await parseOpenAIStream(reader, (e) => events.push(e), controller.signal);
+
+ // 拼接所有 content_delta 与 thinking_delta 以验证内容未泄露标签片段
+ const contentParts = events.filter((e) => e.type === "content_delta").map((e: any) => e.delta);
+ const thinkingParts = events.filter((e) => e.type === "thinking_delta").map((e: any) => e.delta);
+ expect(contentParts.join("")).toBe("beforeafter");
+ expect(thinkingParts.join("")).toBe("thought");
+ });
+
+ it("应处理 </think> 标签被 SSE chunk 拆开的情况", async () => {
+ // 结束标签跨 chunk:chunk1 末尾是 "</th",chunk2 开头是 "ink>"
+ const reader = createMockReader([
+ 'data: {"choices":[{"delta":{"content":"<think>thinking</th"}}]}\n\n',
+ 'data: {"choices":[{"delta":{"content":"ink>normal"}}]}\n\n',
+ "data: [DONE]\n\n",
+ ]);
+
+ const events: ChatStreamEvent[] = [];
+ const controller = new AbortController();
+
+ await parseOpenAIStream(reader, (e) => events.push(e), controller.signal);
+
+ const contentParts = events.filter((e) => e.type === "content_delta").map((e: any) => e.delta);
+ const thinkingParts = events.filter((e) => e.type === "thinking_delta").map((e: any) => e.delta);
+ expect(contentParts.join("")).toBe("normal");
+ expect(thinkingParts.join("")).toBe("thinking");
+ });
+
+ it("应处理 <think> 标签逐字符跨 chunk 到达", async () => {
+ // 每个字符独立到达,模拟 token 级别拆分
+ const chunks = "before<think>reasoning</think>after"
+ .split("")
+ .map((ch) => `data: {"choices":[{"delta":{"content":${JSON.stringify(ch)}}}]}\n\n`);
+ chunks.push("data: [DONE]\n\n");
+ const reader = createMockReader(chunks);
+
+ const events: ChatStreamEvent[] = [];
+ const controller = new AbortController();
+
+ await parseOpenAIStream(reader, (e) => events.push(e), controller.signal);
+
+ const contentParts = events.filter((e) => e.type === "content_delta").map((e: any) => e.delta);
+ const thinkingParts = events.filter((e) => e.type === "thinking_delta").map((e: any) => e.delta);
+ expect(contentParts.join("")).toBe("beforeafter");
+ expect(thinkingParts.join("")).toBe("reasoning");
+ });
+
+ it("流结束时仍停留在标签残片则原样作为 content 输出", async () => {
+ // 看起来像 <think> 的残片,但后续再也没有到达 -> 按内容输出
+ const reader = createMockReader(['data: {"choices":[{"delta":{"content":"hello <thi"}}]}\n\n', "data: [DONE]\n\n"]);
+
+ const events: ChatStreamEvent[] = [];
+ const controller = new AbortController();
+
+ await parseOpenAIStream(reader, (e) => events.push(e), controller.signal);
+
+ const contentParts = events.filter((e) => e.type === "content_delta").map((e: any) => e.delta);
+ expect(contentParts.join("")).toBe("hello <thi");
+ });
+
 it("应解析 reasoning_content 字段", async () => {
const reader = createMockReader([
'data: {"choices":[{"delta":{"role":"assistant","content":null,"reasoning_content":"分析页面"}}]}\n\n',
diff --git a/src/app/service/agent/core/providers/openai.ts b/src/app/service/agent/core/providers/openai.ts
index a5efb95fd..d1004eeff 100644
--- a/src/app/service/agent/core/providers/openai.ts
+++ b/src/app/service/agent/core/providers/openai.ts
@@ -126,6 +126,17 @@ export function buildOpenAIRequest(
};
}
+// 返回 input 末尾与 tag 前缀匹配的最长长度(用于跨 chunk 缓存被拆开的标签残片)
+function longestTagPrefixSuffix(input: string, tag: string): number {
+ const max = Math.min(input.length, tag.length - 1);
+ for (let i = max; i > 0; i--) {
+ if (input.endsWith(tag.slice(0, i))) {
+ return i;
+ }
+ }
+ return 0;
+}
+
// 解析 OpenAI SSE 流,生成 ChatStreamEvent
export function parseOpenAIStream(
reader: ReadableStreamDefaultReader,
@@ -139,11 +150,28 @@ export function parseOpenAIStream(
// 标记是否已通过 [DONE] 信号发出了 done 事件,避免 .then() 再次发出
let doneSent = false;
+ // 跨 chunk 追踪 <think>...</think> 块状态(用于把思考混在 content 里的模型)
+ let inThinkBlock = false;
+ // 跨 chunk 保留可能属于标签前缀的残片(例如 chunk 末尾 "<th")
+ let thinkTagCarry = "";
+
+ // 流结束时将未匹配到完整标签的残片原样输出,避免丢内容
+ const flushThinkCarry = () => {
+ if (thinkTagCarry.length > 0) {
+ onEvent({
+ type: inThinkBlock ? "thinking_delta" : "content_delta",
+ delta: thinkTagCarry,
+ });
+ thinkTagCarry = "";
+ }
+ };
+
return readSSEStream(
reader,
signal,
(sseEvent) => {
if (sseEvent.data === "[DONE]") {
+ flushThinkCarry();
doneSent = true;
onEvent({ type: "done", usage: lastUsage });
return true;
@@ -196,7 +224,39 @@ export function parseOpenAIStream(
}
}
} else {
- onEvent({ type: "content_delta", delta: delta.content });
+ // 处理 <think>...</think> 内联标签(reasoning 模型)
+ // 思考内容路由为 thinking_delta,避免裸露标签出现在对话里
+ // 标签可能被 SSE chunk 拆开(如 "<th" + "ink>"),用 carry 保留末尾可能的标签前缀
+ let remaining: string = thinkTagCarry + delta.content;
+ thinkTagCarry = "";
+
+ while (remaining.length > 0) {
+ const tag = inThinkBlock ? "</think>" : "<think>";
+ const idx = remaining.indexOf(tag);
+ if (idx === -1) {
+ // 未找到完整标签,保留末尾可能匹配标签前缀的残片
+ const carryLen = longestTagPrefixSuffix(remaining, tag);
+ const emittable = remaining.slice(0, remaining.length - carryLen);
+ if (emittable.length > 0) {
+ onEvent({
+ type: inThinkBlock ? "thinking_delta" : "content_delta",
+ delta: emittable,
+ });
+ }
+ thinkTagCarry = remaining.slice(remaining.length - carryLen);
+ remaining = "";
+ } else {
+ // 找到标签:标签前的部分按当前状态输出,之后切换状态
+ if (idx > 0) {
+ onEvent({
+ type: inThinkBlock ? "thinking_delta" : "content_delta",
+ delta: remaining.slice(0, idx),
+ });
+ }
+ inThinkBlock = !inThinkBlock;
+ remaining = remaining.slice(idx + tag.length);
+ }
+ }
}
}
@@ -245,6 +305,7 @@ export function parseOpenAIStream(
).then(() => {
// 流正常结束但没收到 [DONE](某些 API 可能如此)
if (!signal.aborted && !doneSent) {
+ flushThinkCarry();
onEvent({ type: "done", usage: lastUsage });
}
});
diff --git a/src/pages/options/routes/AgentProvider.tsx b/src/pages/options/routes/AgentProvider.tsx
index ac9eacc46..dedb93dcb 100644
--- a/src/pages/options/routes/AgentProvider.tsx
+++ b/src/pages/options/routes/AgentProvider.tsx
@@ -377,6 +377,9 @@ function AgentProvider() {
let chatUrl: string;
let body: string;
+ const systemMessage = "Reply in one brief sentence only. No thinking or reasoning.";
+ const userMessage = "Greet the user warmly in a short, concise sentence.";
+
if (editingModel.provider === "anthropic") {
chatUrl = `${baseUrl}/v1/messages`;
headers["x-api-key"] = editingModel.apiKey;
@@ -385,7 +388,9 @@ function AgentProvider() {
body = JSON.stringify({
model: editingModel.model || "claude-sonnet-4-20250514",
max_tokens: 256,
- messages: [{ role: "user", content: "hi" }],
+ system: systemMessage,
+ messages: [{ role: "user", content: userMessage }],
+ stream: false,
});
} else {
chatUrl = `${baseUrl}/chat/completions`;
@@ -396,7 +401,11 @@ function AgentProvider() {
body = JSON.stringify({
model: editingModel.model || defaultModel,
max_tokens: 256,
- messages: [{ role: "user", content: "hi" }],
+ messages: [
+ { role: "system", content: systemMessage },
+ { role: "user", content: userMessage },
+ ],
+ stream: false,
});
}
| | |