Skip to content

Commit 9a28d39

Browse files
Copilot and na-trium-144 committed
Consolidate gemini.ts and openrouter.ts into a single generateContent function
Co-authored-by: na-trium-144 <100704180+na-trium-144@users.noreply.github.com>
1 parent 0e190c7 commit 9a28d39

File tree

3 files changed

+46
-49
lines changed

3 files changed

+46
-49
lines changed

app/actions/chatActions.ts

Lines changed: 1 addition & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,12 @@
11
"use server";
22

33
// import { z } from "zod";
4-
import { generateContent as generateContentGemini } from "./gemini";
5-
import { generateContent as generateContentOpenRouter } from "./openrouter";
4+
import { generateContent } from "./gemini";
65
import { DynamicMarkdownSection } from "../[lang]/[pageId]/pageContent";
76
import { ReplCommand, ReplOutput } from "@my-code/runtime/interface";
87
import { addChat, ChatWithMessages } from "@/lib/chatHistory";
98
import { getPagesList, introSectionId, PagePath, SectionId } from "@/lib/docs";
109

11-
function generateContent(prompt: string, systemInstruction?: string) {
12-
if (process.env.OPENROUTER_API_KEY && process.env.OPENROUTER_MODEL) {
13-
return generateContentOpenRouter(prompt, systemInstruction);
14-
}
15-
return generateContentGemini(prompt, systemInstruction);
16-
}
17-
1810
type ChatResult =
1911
| {
2012
error: string;

app/actions/gemini.ts

Lines changed: 45 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,20 +1,56 @@
11
"use server";
22

33
import { GoogleGenAI } from "@google/genai";
4+
import OpenAI from "openai";
5+
6+
export async function generateContent(
7+
prompt: string,
8+
systemInstruction?: string
9+
): Promise<{ text: string }> {
10+
const openRouterApiKey = process.env.OPENROUTER_API_KEY;
11+
const openRouterModel = process.env.OPENROUTER_MODEL;
12+
13+
if (openRouterApiKey && openRouterModel) {
14+
const client = new OpenAI({
15+
apiKey: openRouterApiKey,
16+
baseURL: "https://openrouter.ai/api/v1",
17+
});
18+
19+
const messages: OpenAI.Chat.ChatCompletionMessageParam[] = [];
20+
if (systemInstruction) {
21+
messages.push({ role: "system", content: systemInstruction });
22+
}
23+
messages.push({ role: "user", content: prompt });
24+
25+
const completion = await client.chat.completions.create({
26+
model: openRouterModel,
27+
messages,
28+
});
29+
30+
const text = completion.choices[0]?.message?.content;
31+
if (!text) {
32+
throw new Error("OpenRouterからの応答が空でした");
33+
}
34+
return { text };
35+
}
436

5-
export async function generateContent(prompt: string, systemInstruction?: string) {
637
const params = {
738
model: "gemini-2.5-flash",
839
contents: prompt,
940
config: {
1041
systemInstruction,
11-
}
42+
},
1243
};
1344

1445
const ai = new GoogleGenAI({ apiKey: process.env.API_KEY! });
1546

1647
try {
17-
return await ai.models.generateContent(params);
48+
const result = await ai.models.generateContent(params);
49+
const text = result.text;
50+
if (!text) {
51+
throw new Error("Geminiからの応答が空でした");
52+
}
53+
return { text };
1854
} catch (e: unknown) {
1955
if (String(e).includes("User location is not supported")) {
2056
// For the new API, we can use httpOptions to set a custom baseUrl
@@ -24,7 +60,12 @@ export async function generateContent(prompt: string, systemInstruction?: string
2460
baseUrl: "https://gemini-proxy.utcode.net",
2561
},
2662
});
27-
return await aiWithProxy.models.generateContent(params);
63+
const result = await aiWithProxy.models.generateContent(params);
64+
const text = result.text;
65+
if (!text) {
66+
throw new Error("Geminiからの応答が空でした");
67+
}
68+
return { text };
2869
} else {
2970
throw e;
3071
}

app/actions/openrouter.ts

Lines changed: 0 additions & 36 deletions
This file was deleted.

0 commit comments

Comments (0)