@@ -20,10 +20,9 @@ export interface ChatMessage {
2020
/**
 * Payload for an AI chat request sent from the renderer over IPC.
 */
export interface AIRequest {
  /** Unique id per request; keys the AbortController map so streams can be cancelled. */
  requestId: string
  /** Provider connection settings — apiHost, apiKey, modelName (renamed from `config`). */
  llmConfig: LLMConfig
  /** Generation parameters: systemPrompt, temperature, topP. */
  modelConfig: ModelConfig
  /** Conversation history forwarded to the model. */
  messages: ChatMessage[]
}
2827
2928export interface AIStreamChunk {
@@ -38,20 +37,7 @@ class AIHandler {
3837 // 使用 Map 管理多个并行请求的 AbortController
3938 private abortControllers = new Map < string , AbortController > ( )
4039
41- constructor ( ) {
42- this . setupHandlers ( )
43- }
44-
45- private setupHandlers ( ) {
46- ipcMain . handle ( 'ai:send-message' , ( event , request : AIRequest ) => this . sendMessage ( request ) )
47- ipcMain . handle ( 'ai:send-message-streaming' , ( event , request : AIRequest ) =>
48- this . sendMessageStreaming ( event , request )
49- )
50- ipcMain . handle ( 'ai:stop-streaming' , ( event , requestId : string ) => this . stopStreaming ( requestId ) )
51- ipcMain . handle ( 'ai:test-connection' , ( event , config : LLMConfig ) => this . testConnection ( config ) )
52- }
53-
54- private async sendMessageStreaming (
40+ public async sendMessageStreaming (
5541 event : Electron . IpcMainInvokeEvent ,
5642 request : AIRequest
5743 ) : Promise < void > {
@@ -87,19 +73,19 @@ class AIHandler {
8773 }
8874
8975 const response = await fetch (
90- `${ request . config . apiHost . replace ( / \/ $ / , '' ) } /chat/completions` ,
76+ `${ request . llmConfig . apiHost . replace ( / \/ $ / , '' ) } /chat/completions` ,
9177 {
9278 method : 'POST' ,
9379 headers : {
9480 'Content-Type' : 'application/json' ,
95- Authorization : `Bearer ${ request . config . apiKey } `
81+ Authorization : `Bearer ${ request . llmConfig . apiKey } `
9682 } ,
9783 body : JSON . stringify ( {
98- model : request . config . modelName ,
84+ model : request . llmConfig . modelName ,
9985 messages : apiMessages ,
100- stream : true ,
10186 temperature : modelConfig . temperature ,
102- top_p : modelConfig . topP
87+ top_p : modelConfig . topP ,
88+ stream : true
10389 } ) ,
10490 signal : abortController . signal
10591 }
@@ -231,87 +217,15 @@ class AIHandler {
231217 }
232218 }
233219
234- private async stopStreaming ( requestId : string ) : Promise < void > {
220+ public async stopStreaming ( requestId : string ) : Promise < void > {
235221 const abortController = this . abortControllers . get ( requestId )
236222 if ( abortController ) {
237223 abortController . abort ( )
238224 this . abortControllers . delete ( requestId )
239225 }
240226 }
241227
242- private async sendMessage (
243- request : AIRequest
244- ) : Promise < { success : boolean ; content ?: string ; reasoning_content ?: string ; error ?: string } > {
245- try {
246- // 准备消息数组,如果有systemPrompt,插入system消息
247- const apiMessages = request . messages . map ( ( msg ) => ( {
248- role : msg . role ,
249- content : msg . content
250- } ) )
251-
252- let modelConfig = request . modelConfig
253- if ( ! request . modelConfig ) {
254- modelConfig = {
255- systemPrompt : '' ,
256- topP : 1 ,
257- temperature : 1
258- }
259- }
260-
261- // 如果有systemPrompt且第一条消息不是system消息,则插入system消息
262- if (
263- modelConfig . systemPrompt &&
264- ( apiMessages . length === 0 || apiMessages [ 0 ] . role !== 'system' )
265- ) {
266- apiMessages . unshift ( {
267- role : 'system' ,
268- content : modelConfig . systemPrompt
269- } )
270- }
271-
272- const response = await fetch (
273- `${ request . config . apiHost . replace ( / \/ $ / , '' ) } /chat/completions` ,
274- {
275- method : 'POST' ,
276- headers : {
277- 'Content-Type' : 'application/json' ,
278- Authorization : `Bearer ${ request . config . apiKey } `
279- } ,
280- body : JSON . stringify ( {
281- model : request . config . modelName ,
282- messages : apiMessages ,
283- temperature : modelConfig . temperature ,
284- top_p : modelConfig . topP
285- } )
286- }
287- )
288-
289- if ( ! response . ok ) {
290- return {
291- success : false ,
292- error : `HTTP error! status: ${ response . status } `
293- }
294- }
295-
296- const data = await response . json ( )
297- const content = data . choices ?. [ 0 ] ?. message ?. content || ''
298- const reasoning_content =
299- data . choices ?. [ 0 ] ?. message ?. reasoning_content || data . choices ?. [ 0 ] ?. message ?. reasoning
300-
301- return {
302- success : true ,
303- content,
304- reasoning_content
305- }
306- } catch ( error ) {
307- return {
308- success : false ,
309- error : error instanceof Error ? error . message : 'Unknown error'
310- }
311- }
312- }
313-
314- private async testConnection ( config : LLMConfig ) : Promise < { success : boolean ; error ?: string } > {
228+ public async testConnection ( config : LLMConfig ) : Promise < { success : boolean ; error ?: string } > {
315229 try {
316230 const response = await fetch ( `${ config . apiHost . replace ( / \/ $ / , '' ) } /models` , {
317231 method : 'GET' ,
@@ -338,3 +252,11 @@ class AIHandler {
338252}
339253
340254export const aiHandler = new AIHandler ( )
255+
256+ export function setupAIHandlers ( ) {
257+ ipcMain . handle ( 'ai:send-message-streaming' , ( event , request : AIRequest ) =>
258+ aiHandler . sendMessageStreaming ( event , request )
259+ )
260+ ipcMain . handle ( 'ai:stop-streaming' , ( event , requestId : string ) => aiHandler . stopStreaming ( requestId ) )
261+ ipcMain . handle ( 'ai:test-connection' , ( event , config : LLMConfig ) => aiHandler . testConnection ( config ) )
262+ }