Skip to content

Commit 2f41c23

Browse files
authored
fix: chat issue (opentiny#1770)
1 parent a6add41 commit 2f41c23

7 files changed

Lines changed: 44 additions & 16 deletions

File tree

packages/plugins/robot/src/Main.vue

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@
1818
v-model:fullscreen="fullscreen"
1919
v-model:show="robotVisible"
2020
v-model:input="inputMessage"
21-
:status="messageState.status"
21+
:status="chatStatus"
2222
:prompt-items="promptItems"
2323
:bubble-renderers="bubbleRenderers"
2424
:allowFiles="isVisualModel && robotSettingState.chatMode === ChatMode.Agent"
@@ -147,9 +147,9 @@ const showTeleport = ref(false)
147147
const showSetting = ref(false)
148148
149149
const {
150+
chatStatus,
150151
inputMessage,
151152
messages,
152-
messageState,
153153
changeChatMode,
154154
abortRequest,
155155
initChatClient,

packages/plugins/robot/src/components/chat/RobotChat.vue

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -36,8 +36,7 @@
3636
:placeholder="GeneratingStatus.includes(props.status) ? '正在思考中...' : '请输入您的问题'"
3737
:clearable="true"
3838
:loading="GeneratingStatus.includes(props.status)"
39-
:showWordLimit="true"
40-
:maxLength="4000"
39+
:showWordLimit="false"
4140
@submit="handleSendMessage"
4241
@cancel="handleAbortRequest"
4342
:allowFiles="selectedAttachments.length < 1 && props.allowFiles"

packages/plugins/robot/src/composables/core/useConversation.ts

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,6 +11,7 @@ export interface ConversationAdapterOptions {
1111
statusManager: {
1212
isProcessing: () => boolean
1313
setProcessing: () => void
14+
resetProcessing: () => void
1415
}
1516
}
1617

@@ -43,7 +44,8 @@ export function useConversationAdapter(options: ConversationAdapterOptions) {
4344
const contextMessages = toRaw(messages.value.slice(0, -1))
4445
await onFinishRequest(finishReason ?? 'unknown', messages.value, contextMessages, messageState)
4546
const lastMessage = messages.value.at(-1)
46-
if (lastMessage) {
47+
if (lastMessage && finishReason === 'stop' && !lastMessage.tool_calls && statusManager.isProcessing()) {
48+
statusManager.resetProcessing()
4749
await onMessageProcessed(finishReason ?? 'unknown', lastMessage.content ?? '', messages.value, {})
4850
}
4951
}

packages/plugins/robot/src/composables/core/useMessageStream.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ const handleDeltaReasoning = (choice: ChatCompletionStreamResponseChoice, lastMe
2929
})
3030
}
3131
lastMessage.renderContent.at(-1)!.content += choice.delta.reasoning_content
32+
lastMessage.reasoning_content = (lastMessage.reasoning_content || '') + choice.delta.reasoning_content
3233
}
3334
}
3435

packages/plugins/robot/src/composables/features/useToolCalls.ts

Lines changed: 8 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -74,14 +74,19 @@ export interface ToolCallHandlerConfig {
7474
onDone: (finishReason: string, messages: any[], contextMessages: any[], messageState: any) => Promise<void>
7575
}
7676
getMessageState: () => any
77+
statusManager?: {
78+
isProcessing: () => boolean
79+
setProcessing: () => void
80+
resetProcessing: () => void
81+
}
7782
}
7883

7984
/**
8085
* 创建工具调用处理器
8186
* 使用工厂函数模式,将所有依赖通过配置注入
8287
*/
8388
export function createToolCallHandler(config: ToolCallHandlerConfig) {
84-
const { client, getAbortController, formatMessages, hooks, streamHandlers, getMessageState } = config
89+
const { client, getAbortController, formatMessages, hooks, streamHandlers, getMessageState, statusManager } = config
8590

8691
return async (tool_calls: ResponseToolCall[], messages: any[], contextMessages: RobotMessage[]) => {
8792
const hasToolCall = tool_calls?.length > 0
@@ -118,6 +123,8 @@ export function createToolCallHandler(config: ToolCallHandlerConfig) {
118123

119124
delete currentMessage.tool_calls
120125

126+
statusManager?.setProcessing()
127+
121128
// 使用工具调用结果继续对话
122129
await client.chatStream(
123130
{ messages: toolMessages as any, options: { signal: abortController.signal } },

packages/plugins/robot/src/composables/useChat.ts

Lines changed: 27 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
import { nextTick } from 'vue'
1+
import { nextTick, ref } from 'vue'
22
import { GeneratingStatus, STATUS, type ChatMessage, type MessageState } from '@opentiny/tiny-robot-kit'
33
import { formatMessages, removeLoading } from '../utils'
44
import { getClientConfig as getConfig, updateClientConfig as updateConfig, client } from '../services/aiClient'
@@ -39,7 +39,7 @@ enum CHAT_STATUS {
3939
FINISHED = 'finished' // 本轮对话结束
4040
}
4141

42-
let chatStatus: CHAT_STATUS = CHAT_STATUS.PROCESSING
42+
const chatStatus = ref<CHAT_STATUS>(CHAT_STATUS.FINISHED)
4343

4444
const abortControllerMap: Record<string, AbortController> = {}
4545

@@ -52,9 +52,9 @@ const handleStreamData = createStreamDataHandler({
5252
onStreamTools
5353
},
5454
statusManager: {
55-
isStreaming: () => chatStatus === CHAT_STATUS.STREAMING,
55+
isStreaming: () => chatStatus.value === CHAT_STATUS.STREAMING,
5656
setStreaming: () => {
57-
chatStatus = CHAT_STATUS.STREAMING
57+
chatStatus.value = CHAT_STATUS.STREAMING
5858
}
5959
}
6060
})
@@ -110,11 +110,15 @@ const handleFinishRequest = async (
110110

111111
if (finishReason === 'aborted' || messageState?.status === STATUS.ABORTED) {
112112
messageState.status = STATUS.ABORTED
113+
} else if (finishReason === 'stop' && !lastMessage.tool_calls) {
114+
messageState.status = STATUS.FINISHED
115+
chatStatus.value = CHAT_STATUS.FINISHED
116+
await onMessageProcessed(finishReason, lastMessage.content ?? '', messages.value, {})
113117
}
114118
}
115119

116120
const handleRequestError = async (error: Error, messages: ChatMessage[], messageState: MessageState) => {
117-
chatStatus = CHAT_STATUS.FINISHED
121+
chatStatus.value = CHAT_STATUS.FINISHED
118122
delete abortControllerMap.main
119123
await onRequestEnd('error', messages.at(-1).content, messages, { error }) // 本次请求结束
120124
messageState.status = STATUS.ERROR
@@ -140,12 +144,15 @@ const {
140144
if (GeneratingStatus.includes(messageManager.messageState.status)) {
141145
messageManager.messageState.status = STATUS.FINISHED
142146
}
143-
chatStatus = CHAT_STATUS.FINISHED
147+
chatStatus.value = CHAT_STATUS.FINISHED
144148
},
145149
statusManager: {
146-
isProcessing: () => chatStatus === CHAT_STATUS.PROCESSING,
150+
isProcessing: () => chatStatus.value === CHAT_STATUS.PROCESSING,
147151
setProcessing: () => {
148-
chatStatus = CHAT_STATUS.PROCESSING
152+
chatStatus.value = CHAT_STATUS.PROCESSING
153+
},
154+
resetProcessing: () => {
155+
chatStatus.value = CHAT_STATUS.FINISHED
149156
}
150157
}
151158
})
@@ -168,7 +175,16 @@ const handleToolCall = createToolCallHandler({
168175
onError: handleRequestError,
169176
onDone: handleFinishRequest
170177
},
171-
getMessageState: () => messageManager.messageState
178+
getMessageState: () => messageManager.messageState,
179+
statusManager: {
180+
isProcessing: () => chatStatus.value === CHAT_STATUS.PROCESSING,
181+
setProcessing: () => {
182+
chatStatus.value = CHAT_STATUS.PROCESSING
183+
},
184+
resetProcessing: () => {
185+
chatStatus.value = CHAT_STATUS.FINISHED
186+
}
187+
}
172188
})
173189

174190
// 包装 conversation 方法,添加业务特定逻辑
@@ -235,6 +251,7 @@ const abortRequest = () => {
235251
for (const key of Object.keys(abortControllerMap)) {
236252
delete abortControllerMap[key]
237253
}
254+
chatStatus.value = CHAT_STATUS.FINISHED
238255

239256
onRequestEnd('aborted', messageManager.messages.value.at(-1)?.content as string, messageManager.messages.value)
240257
}
@@ -254,6 +271,7 @@ const changeChatMode = (chatMode: string) => {
254271

255272
export default function () {
256273
return {
274+
chatStatus,
257275
initChatClient,
258276
updateConfig,
259277
...messageManager,

packages/plugins/robot/src/utils/chat.utils.ts

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,8 @@ export const formatMessages = (messages: LLMMessage[]) => {
1111
role: message.role,
1212
content: message.content,
1313
...(message.tool_calls ? { tool_calls: message.tool_calls } : {}),
14-
...(message.tool_call_id ? { tool_call_id: message.tool_call_id } : {})
14+
...(message.tool_call_id ? { tool_call_id: message.tool_call_id } : {}),
15+
...(message.reasoning_content ? { reasoning_content: message.reasoning_content } : {})
1516
}))
1617
}
1718

0 commit comments

Comments (0)