11"use server" ;
22
33import { GoogleGenAI } from "@google/genai" ;
4+ import OpenAI from "openai" ;
5+
6+ export async function generateContent (
7+ prompt : string ,
8+ systemInstruction ?: string
9+ ) : Promise < { text : string } > {
10+ const openRouterApiKey = process . env . OPENROUTER_API_KEY ;
11+ const openRouterModel = process . env . OPENROUTER_MODEL ;
12+
13+ if ( openRouterApiKey && openRouterModel ) {
14+ const client = new OpenAI ( {
15+ apiKey : openRouterApiKey ,
16+ baseURL : "https://openrouter.ai/api/v1" ,
17+ } ) ;
18+
19+ const messages : OpenAI . Chat . ChatCompletionMessageParam [ ] = [ ] ;
20+ if ( systemInstruction ) {
21+ messages . push ( { role : "system" , content : systemInstruction } ) ;
22+ }
23+ messages . push ( { role : "user" , content : prompt } ) ;
24+
25+ const completion = await client . chat . completions . create ( {
26+ model : openRouterModel ,
27+ messages,
28+ } ) ;
29+
30+ const text = completion . choices [ 0 ] ?. message ?. content ;
31+ if ( ! text ) {
32+ throw new Error ( "OpenRouterからの応答が空でした" ) ;
33+ }
34+ return { text } ;
35+ }
436
5- export async function generateContent ( prompt : string , systemInstruction ?: string ) {
637 const params = {
738 model : "gemini-2.5-flash" ,
839 contents : prompt ,
940 config : {
1041 systemInstruction,
11- }
42+ } ,
1243 } ;
1344
1445 const ai = new GoogleGenAI ( { apiKey : process . env . API_KEY ! } ) ;
1546
1647 try {
17- return await ai . models . generateContent ( params ) ;
48+ const result = await ai . models . generateContent ( params ) ;
49+ const text = result . text ;
50+ if ( ! text ) {
51+ throw new Error ( "Geminiからの応答が空でした" ) ;
52+ }
53+ return { text } ;
1854 } catch ( e : unknown ) {
1955 if ( String ( e ) . includes ( "User location is not supported" ) ) {
2056 // For the new API, we can use httpOptions to set a custom baseUrl
@@ -24,7 +60,12 @@ export async function generateContent(prompt: string, systemInstruction?: string
2460 baseUrl : "https://gemini-proxy.utcode.net" ,
2561 } ,
2662 } ) ;
27- return await aiWithProxy . models . generateContent ( params ) ;
63+ const result = await aiWithProxy . models . generateContent ( params ) ;
64+ const text = result . text ;
65+ if ( ! text ) {
66+ throw new Error ( "Geminiからの応答が空でした" ) ;
67+ }
68+ return { text } ;
2869 } else {
2970 throw e ;
3071 }