Skip to content

Commit 3d8a3f7

Browse files
committed
feat: implement StreamingThrottler for optimized content updates in useAIService
1 parent f2db754 commit 3d8a3f7

1 file changed

Lines changed: 99 additions & 3 deletions

File tree

src/renderer/src/components/pages/chat/hooks/useAIService.ts

Lines changed: 99 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { useState, useCallback, useRef } from 'react'
1+
import { useState, useCallback, useRef, useEffect } from 'react'
22
import { App } from 'antd'
33
import { v4 as uuidv4 } from 'uuid'
44
import { useSettingsStore } from '../../../../stores/settingsStore'
@@ -8,6 +8,87 @@ import { usePagesStore } from '../../../../stores/pagesStore'
88
import { ChatMessage, LLMConfig, AITask } from '../../../../types/type'
99
import { createAIService } from '../../../../services/aiService'
1010

11+
// 流式更新节流器 - 使用 requestAnimationFrame 限制更新频率
12+
class StreamingThrottler {
13+
private pendingContent: Map<string, string> = new Map()
14+
private pendingReasoning: Map<string, string> = new Map()
15+
private rafId: number | null = null
16+
private updateContentFn: ((chatId: string, messageId: string, content: string) => void) | null =
17+
null
18+
private updateReasoningFn:
19+
| ((chatId: string, messageId: string, reasoning: string) => void)
20+
| null = null
21+
22+
setUpdateFunctions(
23+
contentFn: (chatId: string, messageId: string, content: string) => void,
24+
reasoningFn: (chatId: string, messageId: string, reasoning: string) => void
25+
) {
26+
this.updateContentFn = contentFn
27+
this.updateReasoningFn = reasoningFn
28+
}
29+
30+
scheduleContentUpdate(chatId: string, messageId: string, content: string) {
31+
const key = `${chatId}:${messageId}`
32+
this.pendingContent.set(key, content)
33+
this.scheduleFlush()
34+
}
35+
36+
scheduleReasoningUpdate(chatId: string, messageId: string, reasoning: string) {
37+
const key = `${chatId}:${messageId}`
38+
this.pendingReasoning.set(key, reasoning)
39+
this.scheduleFlush()
40+
}
41+
42+
private scheduleFlush() {
43+
if (this.rafId !== null) return
44+
this.rafId = requestAnimationFrame(() => {
45+
this.flush()
46+
this.rafId = null
47+
})
48+
}
49+
50+
private flush() {
51+
// 批量更新所有待处理的内容
52+
if (this.updateContentFn) {
53+
this.pendingContent.forEach((content, key) => {
54+
const [chatId, messageId] = key.split(':')
55+
this.updateContentFn!(chatId, messageId, content)
56+
})
57+
}
58+
this.pendingContent.clear()
59+
60+
// 批量更新所有待处理的推理内容
61+
if (this.updateReasoningFn) {
62+
this.pendingReasoning.forEach((reasoning, key) => {
63+
const [chatId, messageId] = key.split(':')
64+
this.updateReasoningFn!(chatId, messageId, reasoning)
65+
})
66+
}
67+
this.pendingReasoning.clear()
68+
}
69+
70+
// 强制立即刷新(用于完成时确保内容同步)
71+
forceFlush() {
72+
if (this.rafId !== null) {
73+
cancelAnimationFrame(this.rafId)
74+
this.rafId = null
75+
}
76+
this.flush()
77+
}
78+
79+
cleanup() {
80+
if (this.rafId !== null) {
81+
cancelAnimationFrame(this.rafId)
82+
this.rafId = null
83+
}
84+
this.pendingContent.clear()
85+
this.pendingReasoning.clear()
86+
}
87+
}
88+
89+
// 全局节流器实例
90+
const streamingThrottler = new StreamingThrottler()
91+
1192
export interface UseAIServiceReturn {
1293
isLoading: boolean
1394
activeAIServices: Map<string, any>
@@ -38,6 +119,17 @@ export function useAIService(chatId: string): UseAIServiceReturn {
38119
const [activeAIServices, setActiveAIServices] = useState<Map<string, any>>(new Map())
39120
const { message } = App.useApp()
40121

122+
  // Wire the module-level throttler to this hook's store callbacks, so batched
  // flushes write into the current streaming message; on unmount (or when the
  // callbacks change) cancel the scheduled frame and drop pending updates.
  // NOTE(review): streamingThrottler is a module-level singleton — if multiple
  // useAIService hooks are mounted at once, the last effect to run wins and
  // one unmount cleans up for all; confirm only one instance is live at a time.
  useEffect(() => {
    streamingThrottler.setUpdateFunctions(
      updateStreamingMessageContent,
      updateStreamingMessageReasoning
    )
    return () => {
      streamingThrottler.cleanup()
    }
  }, [updateStreamingMessageContent, updateStreamingMessageReasoning])
132+
41133
const getLLMConfig = useCallback(
42134
(modelId?: string): LLMConfig | null => {
43135
const targetModelId = modelId || settings.defaultLLMId
@@ -114,13 +206,17 @@ export function useAIService(chatId: string): UseAIServiceReturn {
114206
aiService.sendMessage(messages, {
115207
onChunk: (chunk: string) => {
116208
streamingContent += chunk
117-
updateStreamingMessageContent(chatId, messageId, streamingContent)
209+
// 使用节流器批量更新,避免高频状态更新导致界面卡顿
210+
streamingThrottler.scheduleContentUpdate(chatId, messageId, streamingContent)
118211
},
119212
onReasoning: (reasoning_content: string) => {
120213
streamingReasoning += reasoning_content
121-
updateStreamingMessageReasoning(chatId, messageId, streamingReasoning)
214+
// 使用节流器批量更新
215+
streamingThrottler.scheduleReasoningUpdate(chatId, messageId, streamingReasoning)
122216
},
123217
onComplete: async (fullResponse: string, reasoning_content?: string) => {
218+
// 完成前强制刷新,确保所有内容都已更新到 UI
219+
streamingThrottler.forceFlush()
124220
const finalContent = fullResponse || streamingContent
125221
const finalReasoning = reasoning_content || streamingReasoning || undefined
126222

0 commit comments

Comments
 (0)