Skip to content

Commit 4c5dc71

Browse files
committed
refactor: update AIRequest structure and streamline IPC handler setup
1 parent 1923b8c commit 4c5dc71

6 files changed

Lines changed: 22 additions & 129 deletions

File tree

src/main/aiHandler.ts

Lines changed: 17 additions & 95 deletions
Original file line numberDiff line numberDiff line change
@@ -20,10 +20,9 @@ export interface ChatMessage {
2020

2121
export interface AIRequest {
2222
requestId: string
23-
config: LLMConfig
23+
llmConfig: LLMConfig
2424
modelConfig: ModelConfig
2525
messages: ChatMessage[]
26-
streaming?: boolean
2726
}
2827

2928
export interface AIStreamChunk {
@@ -38,20 +37,7 @@ class AIHandler {
3837
// 使用 Map 管理多个并行请求的 AbortController
3938
private abortControllers = new Map<string, AbortController>()
4039

41-
constructor() {
42-
this.setupHandlers()
43-
}
44-
45-
private setupHandlers() {
46-
ipcMain.handle('ai:send-message', (event, request: AIRequest) => this.sendMessage(request))
47-
ipcMain.handle('ai:send-message-streaming', (event, request: AIRequest) =>
48-
this.sendMessageStreaming(event, request)
49-
)
50-
ipcMain.handle('ai:stop-streaming', (event, requestId: string) => this.stopStreaming(requestId))
51-
ipcMain.handle('ai:test-connection', (event, config: LLMConfig) => this.testConnection(config))
52-
}
53-
54-
private async sendMessageStreaming(
40+
public async sendMessageStreaming(
5541
event: Electron.IpcMainInvokeEvent,
5642
request: AIRequest
5743
): Promise<void> {
@@ -87,19 +73,19 @@ class AIHandler {
8773
}
8874

8975
const response = await fetch(
90-
`${request.config.apiHost.replace(/\/$/, '')}/chat/completions`,
76+
`${request.llmConfig.apiHost.replace(/\/$/, '')}/chat/completions`,
9177
{
9278
method: 'POST',
9379
headers: {
9480
'Content-Type': 'application/json',
95-
Authorization: `Bearer ${request.config.apiKey}`
81+
Authorization: `Bearer ${request.llmConfig.apiKey}`
9682
},
9783
body: JSON.stringify({
98-
model: request.config.modelName,
84+
model: request.llmConfig.modelName,
9985
messages: apiMessages,
100-
stream: true,
10186
temperature: modelConfig.temperature,
102-
top_p: modelConfig.topP
87+
top_p: modelConfig.topP,
88+
stream: true
10389
}),
10490
signal: abortController.signal
10591
}
@@ -231,87 +217,15 @@ class AIHandler {
231217
}
232218
}
233219

234-
private async stopStreaming(requestId: string): Promise<void> {
220+
public async stopStreaming(requestId: string): Promise<void> {
235221
const abortController = this.abortControllers.get(requestId)
236222
if (abortController) {
237223
abortController.abort()
238224
this.abortControllers.delete(requestId)
239225
}
240226
}
241227

242-
private async sendMessage(
243-
request: AIRequest
244-
): Promise<{ success: boolean; content?: string; reasoning_content?: string; error?: string }> {
245-
try {
246-
// 准备消息数组,如果有systemPrompt,插入system消息
247-
const apiMessages = request.messages.map((msg) => ({
248-
role: msg.role,
249-
content: msg.content
250-
}))
251-
252-
let modelConfig = request.modelConfig
253-
if (!request.modelConfig) {
254-
modelConfig = {
255-
systemPrompt: '',
256-
topP: 1,
257-
temperature: 1
258-
}
259-
}
260-
261-
// 如果有systemPrompt且第一条消息不是system消息,则插入system消息
262-
if (
263-
modelConfig.systemPrompt &&
264-
(apiMessages.length === 0 || apiMessages[0].role !== 'system')
265-
) {
266-
apiMessages.unshift({
267-
role: 'system',
268-
content: modelConfig.systemPrompt
269-
})
270-
}
271-
272-
const response = await fetch(
273-
`${request.config.apiHost.replace(/\/$/, '')}/chat/completions`,
274-
{
275-
method: 'POST',
276-
headers: {
277-
'Content-Type': 'application/json',
278-
Authorization: `Bearer ${request.config.apiKey}`
279-
},
280-
body: JSON.stringify({
281-
model: request.config.modelName,
282-
messages: apiMessages,
283-
temperature: modelConfig.temperature,
284-
top_p: modelConfig.topP
285-
})
286-
}
287-
)
288-
289-
if (!response.ok) {
290-
return {
291-
success: false,
292-
error: `HTTP error! status: ${response.status}`
293-
}
294-
}
295-
296-
const data = await response.json()
297-
const content = data.choices?.[0]?.message?.content || ''
298-
const reasoning_content =
299-
data.choices?.[0]?.message?.reasoning_content || data.choices?.[0]?.message?.reasoning
300-
301-
return {
302-
success: true,
303-
content,
304-
reasoning_content
305-
}
306-
} catch (error) {
307-
return {
308-
success: false,
309-
error: error instanceof Error ? error.message : 'Unknown error'
310-
}
311-
}
312-
}
313-
314-
private async testConnection(config: LLMConfig): Promise<{ success: boolean; error?: string }> {
228+
public async testConnection(config: LLMConfig): Promise<{ success: boolean; error?: string }> {
315229
try {
316230
const response = await fetch(`${config.apiHost.replace(/\/$/, '')}/models`, {
317231
method: 'GET',
@@ -338,3 +252,11 @@ class AIHandler {
338252
}
339253

340254
export const aiHandler = new AIHandler()
255+
256+
export function setupAIHandlers() {
257+
ipcMain.handle('ai:send-message-streaming', (event, request: AIRequest) =>
258+
aiHandler.sendMessageStreaming(event, request)
259+
)
260+
ipcMain.handle('ai:stop-streaming', (event, requestId: string) => aiHandler.stopStreaming(requestId))
261+
ipcMain.handle('ai:test-connection', (event, config: LLMConfig) => aiHandler.testConnection(config))
262+
}

src/main/autoUpdater.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@ import { is } from '@electron-toolkit/utils'
44
import { join } from 'path'
55

66
export function setupAutoUpdater(): void {
7-
// 设置自动下载为false,我们想手动控制下载过程
7+
// 设置自动下载为false,手动控制下载过程
88
autoUpdater.autoDownload = false
99
autoUpdater.autoInstallOnAppQuit = true
1010

src/main/index.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
import { app, BrowserWindow } from 'electron'
22
import { electronApp, optimizer } from '@electron-toolkit/utils'
3-
import { aiHandler } from './aiHandler'
3+
import { setupAIHandlers } from './aiHandler'
44
import { setupAutoUpdater } from './autoUpdater'
55
import { setupIpcHandlers } from './ipcHandlers'
66
import { createWindow } from './window'
@@ -20,7 +20,7 @@ app.whenReady().then(() => {
2020
})
2121

2222
// Initialize AI handler
23-
aiHandler
23+
setupAIHandlers()
2424

2525
// Setup auto updater
2626
setupAutoUpdater()

src/preload/index.d.ts

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,9 +6,8 @@ declare global {
66
api: {
77
ai: {
88
sendMessageStreaming: (request: any) => Promise<any>
9-
sendMessage: (request: any) => Promise<any>
10-
testConnection: (config: any) => Promise<any>
119
stopStreaming: (requestId: string) => Promise<void>
10+
testConnection: (config: any) => Promise<any>
1211
onStreamData: (requestId: string, callback: (data: any) => void) => void
1312
removeStreamListener: (requestId: string) => void
1413
}

src/preload/index.ts

Lines changed: 0 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,6 @@ const streamListeners = new Map<string, (data: any) => void>()
66

77
// Custom APIs for renderer
88
const api = {
9-
// AI相关API
109
ai: {
1110
sendMessageStreaming: (request: any) =>
1211
ipcRenderer.invoke('ai:send-message-streaming', request),

src/renderer/src/services/aiService.ts

Lines changed: 1 addition & 28 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ export class AIService {
8686
// 发送流式请求,包含请求ID
8787
await window.api.ai.sendMessageStreaming({
8888
requestId: this.requestId,
89-
config: this.llmConfig,
89+
llmConfig: this.llmConfig,
9090
modelConfig: this.modelConfig,
9191
messages: messages
9292
})
@@ -102,33 +102,6 @@ export class AIService {
102102
window.api.ai.removeStreamListener(this.requestId)
103103
}
104104

105-
// 备用的非流式方法
106-
async sendMessageNonStreaming(
107-
messages: ChatMessage[]
108-
): Promise<{ content: string; reasoning_content?: string }> {
109-
try {
110-
const result = await window.api.ai.sendMessage({
111-
requestId: this.requestId,
112-
config: this.llmConfig,
113-
modelConfig: this.modelConfig,
114-
messages: messages
115-
})
116-
117-
if (result.success) {
118-
return {
119-
content: result.content || '抱歉,我无法生成回复。',
120-
reasoning_content: result.reasoning_content
121-
}
122-
} else {
123-
throw new Error(result.error || 'Unknown error')
124-
}
125-
} catch (error) {
126-
console.error('AI Service Error:', error)
127-
throw error
128-
}
129-
}
130-
131-
// 测试连接
132105
async testConnection(): Promise<boolean> {
133106
try {
134107
const result = await window.api.ai.testConnection(this.llmConfig)

0 commit comments

Comments (0)