Skip to content

Commit 5ff0cb0

Browse files
authored
refactor: remove obsolete MCP UI support and fix OpenAI Responses assistant-history serialization (#1382)
* refactor: remove obsolete MCP UI support
* fix(openai): serialize assistant history
* chore: update openai deps
1 parent a97e477 commit 5ff0cb0

File tree

27 files changed

+186
-470
lines changed

27 files changed

+186
-470
lines changed

docs/specs/new-agent/v2-spec.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,7 @@
33
## Status: Complete (superseded by v3 refactor)
44

55
> The v2 implementation has been refactored into the v3 module structure. See `v3-spec.md` for the current architecture. This spec is retained for historical context on design decisions.
6+
> Note: the historical MCP UI resource exploration mentioned below is no longer supported in the current codebase.
67
78
## Overview
89

@@ -22,7 +23,6 @@ v0 proved single-turn chat, v1 added multi-turn context assembly. The LLM curren
2223
- Question tool — halting the loop for user input
2324
- ACP agent tool routing — ACP handles tools internally
2425
- Search result extraction from tool responses
25-
- MCP UI resources extraction
2626
- Tool system prompt injection (`ToolPresenter.buildToolSystemPrompt`)
2727

2828
## Data Model

docs/specs/tool-output-guardrails/spec.md

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@
2222
## 非目标
2323

2424
- 不改动或替换 `agentPresenter/tool` 下的 `ToolRegistry`/`toolRouter`.
25-
- 不改变 MCP UI 资源与搜索结果的解析逻辑.
25+
- 不改变搜索结果的解析逻辑.
2626
- 不改 legacy `AgentPresenter` 链路, 本次仅覆盖新 session agent.
2727

2828
## 用户故事
@@ -60,7 +60,7 @@
6060
- 完整文件的绝对路径
6161
- 模型可以通过文件类工具读取上述路径.
6262
- 文件类读取工具仅放行当前会话 `conversationId` 对应目录.
63-
- `tool_call_response_raw` 不被改写, 避免影响 MCP UI/搜索结果处理.
63+
- `tool_call_response_raw` 不被改写, 避免影响搜索结果处理.
6464

6565
### 同轮批量尾部降级
6666

package.json

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@
9494
"nanoid": "^5.1.6",
9595
"node-pty": "^1.1.0",
9696
"ollama": "^0.5.18",
97-
"openai": "^5.23.2",
97+
"openai": "^6.32.0",
9898
"pdf-parse-new": "^1.4.1",
9999
"run-applescript": "^7.1.0",
100100
"safe-regex2": "^5.0.0",
@@ -114,7 +114,6 @@
114114
"@iconify-json/vscode-icons": "^1.2.37",
115115
"@iconify/vue": "^5.0.0",
116116
"@lingual/i18n-check": "0.8.12",
117-
"@mcp-ui/client": "^5.13.3",
118117
"@pinia/colada": "^0.20.0",
119118
"@tailwindcss/typography": "^0.5.19",
120119
"@tailwindcss/vite": "^4.1.18",

src/main/presenter/exporter/formats/conversationExporter.ts

Lines changed: 0 additions & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -228,16 +228,6 @@ function exportToMarkdown(conversation: CONVERSATION, messages: Message[]): stri
228228
}
229229
lines.push('')
230230
break
231-
case 'mcp_ui_resource':
232-
if (block.mcp_ui_resource) {
233-
lines.push('### 🧩 MCP UI 资源')
234-
lines.push('')
235-
lines.push(
236-
`资源: ${block.mcp_ui_resource.uri} (${block.mcp_ui_resource.mimeType ?? ''})`
237-
)
238-
lines.push('')
239-
}
240-
break
241231
case 'image':
242232
lines.push('### 🖼️ 图片')
243233
lines.push('*[图片内容]*')
@@ -458,17 +448,6 @@ function exportToHtml(conversation: CONVERSATION, messages: Message[]): string {
458448
})
459449
)
460450
break
461-
case 'mcp_ui_resource':
462-
if (block.mcp_ui_resource) {
463-
blockLines.push(
464-
...renderTemplate(templates.assistantContent, {
465-
content: formatInlineHtml(
466-
`MCP UI 资源: ${block.mcp_ui_resource.uri} (${block.mcp_ui_resource.mimeType ?? ''})`
467-
)
468-
})
469-
)
470-
}
471-
break
472451
case 'image':
473452
blockLines.push(...renderTemplate(templates.assistantImage))
474453
break
@@ -645,13 +624,6 @@ function exportToText(conversation: CONVERSATION, messages: Message[]): string {
645624
}
646625
lines.push('')
647626
break
648-
case 'mcp_ui_resource':
649-
if (block.mcp_ui_resource) {
650-
lines.push('[MCP UI 资源]')
651-
lines.push(`${block.mcp_ui_resource.uri} (${block.mcp_ui_resource.mimeType ?? ''})`)
652-
lines.push('')
653-
}
654-
break
655627
case 'image':
656628
lines.push('[图片内容]')
657629
lines.push('')

src/main/presenter/exporter/formats/nowledgeMemExporter.ts

Lines changed: 0 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -120,12 +120,6 @@ export function convertDeepChatToNowledgeMemFormat(
120120
content += '[Image Content]\n'
121121
break
122122

123-
case 'mcp_ui_resource':
124-
if (block.mcp_ui_resource) {
125-
content += `[MCP Resource] ${block.mcp_ui_resource.uri}\n`
126-
}
127-
break
128-
129123
case 'error':
130124
if (block.content) {
131125
content += `[Error] ${block.content}\n`

src/main/presenter/llmProviderPresenter/providers/openAIResponsesProvider.ts

Lines changed: 33 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -220,6 +220,20 @@ export class OpenAIResponsesProvider extends BaseLLMProvider {
220220
continue
221221
}
222222

223+
if (msg.role === 'assistant') {
224+
const assistantContent = this.flattenAssistantContent(msg.content)
225+
if (!assistantContent) {
226+
continue
227+
}
228+
229+
// Responses API assistant history does not accept input_text content parts.
230+
result.push({
231+
role: 'assistant',
232+
content: assistantContent
233+
})
234+
continue
235+
}
236+
223237
const content: OpenAI.Responses.ResponseInputMessageContentList = []
224238

225239
if (msg.content !== undefined) {
@@ -256,6 +270,25 @@ export class OpenAIResponsesProvider extends BaseLLMProvider {
256270
return result
257271
}
258272

273+
private flattenAssistantContent(content: ChatMessage['content']): string | null {
274+
if (typeof content === 'string') {
275+
return content.length > 0 ? content : null
276+
}
277+
278+
if (!Array.isArray(content)) {
279+
return null
280+
}
281+
282+
const textContent = content.reduce((result, part) => {
283+
if (part.type !== 'text' || part.text.length === 0) {
284+
return result
285+
}
286+
return `${result}${part.text}`
287+
}, '')
288+
289+
return textContent.length > 0 ? textContent : null
290+
}
291+
259292
// OpenAI完成方法
260293
protected async openAICompletion(
261294
messages: ChatMessage[],

src/renderer/src/components/chat/messageListItems.ts

Lines changed: 0 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,6 @@ export type DisplayAssistantMessageBlock = {
8484
| 'image'
8585
| 'audio'
8686
| 'artifact-thinking'
87-
| 'mcp_ui_resource'
8887
id?: string
8988
content?: string
9089
extra?: DisplayAssistantMessageExtra
@@ -111,13 +110,6 @@ export type DisplayAssistantMessageBlock = {
111110
| 'application/vnd.ant.react'
112111
language?: string
113112
}
114-
mcp_ui_resource?: {
115-
uri: string
116-
mimeType: 'text/html' | 'text/uri-list' | 'application/vnd.mcp-ui.remote-dom'
117-
text?: string
118-
blob?: string
119-
_meta?: Record<string, unknown>
120-
}
121113
tool_call?: {
122114
id?: string
123115
name?: string

src/renderer/src/components/message/MessageBlockMcpUi.vue

Lines changed: 0 additions & 184 deletions
This file was deleted.

src/renderer/src/components/message/MessageItemAssistant.vue

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -69,12 +69,6 @@
6969
@continue="handleBlockContinue"
7070
@switch-provider="handleBlockSwitchProvider"
7171
/>
72-
<MessageBlockMcpUi
73-
v-else-if="block.type === 'mcp_ui_resource'"
74-
:block="block"
75-
:message-id="currentMessage.id"
76-
:thread-id="currentThreadId"
77-
/>
7872
<MessageBlockAudio
7973
v-else-if="isAudioBlock(block)"
8074
:block="block"
@@ -187,7 +181,6 @@ import MessageBlockAction from './MessageBlockAction.vue'
187181
import { useI18n } from 'vue-i18n'
188182
import MessageBlockImage from './MessageBlockImage.vue'
189183
import MessageBlockAudio from './MessageBlockAudio.vue'
190-
import MessageBlockMcpUi from './MessageBlockMcpUi.vue'
191184
import MessageBlockPlan from './MessageBlockPlan.vue'
192185
193186
import {

0 commit comments

Comments (0)