Skip to content

Commit 0118623

Browse files
authored
Merge pull request #17 from juhuaxia/feat/llm-model-config-refactor
feat(chat): centralize LLM model configs and add provider presets
2 parents d3fa5b2 + fc6c812 commit 0118623

File tree

9 files changed

+421
-230
lines changed

9 files changed

+421
-230
lines changed

apps/webuiapps/src/components/ChatPanel/index.module.scss

Lines changed: 32 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -356,6 +356,38 @@
356356
cursor: pointer;
357357
}
358358

359+
.modelSelectorWrapper {
360+
display: flex;
361+
align-items: center;
362+
gap: 4px;
363+
364+
.select {
365+
flex: 1;
366+
}
367+
368+
.fieldInput {
369+
flex: 1;
370+
}
371+
}
372+
373+
.manualToggleBtn {
374+
padding: 6px 8px;
375+
border: 1px solid rgba(255, 255, 255, 0.1);
376+
border-radius: 6px;
377+
background: transparent;
378+
cursor: pointer;
379+
display: flex;
380+
align-items: center;
381+
justify-content: center;
382+
color: rgba(255, 255, 255, 0.6);
383+
transition: all 0.2s;
384+
385+
&:hover {
386+
background: #282a2a;
387+
color: rgba(255, 255, 255, 0.9);
388+
}
389+
}
390+
359391
.settingsActions {
360392
display: flex;
361393
gap: 8px;

apps/webuiapps/src/components/ChatPanel/index.tsx

Lines changed: 68 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -7,17 +7,16 @@ import {
77
Maximize2,
88
ChevronDown,
99
ChevronRight,
10+
Pencil,
11+
List,
1012
} from 'lucide-react';
13+
import { chat, loadConfig, loadConfigSync, saveConfig, type ChatMessage } from '@/lib/llmClient';
1114
import {
12-
chat,
13-
loadConfig,
14-
loadConfigSync,
15-
saveConfig,
16-
getDefaultConfig,
15+
PROVIDER_MODELS,
16+
getDefaultProviderConfig,
1717
type LLMConfig,
1818
type LLMProvider,
19-
type ChatMessage,
20-
} from '@/lib/llmClient';
19+
} from '@/lib/llmModels';
2120
import {
2221
loadImageGenConfig,
2322
loadImageGenConfigSync,
@@ -1008,6 +1007,7 @@ const ChatPanel: React.FC<{ onClose: () => void; visible?: boolean }> = ({
10081007
{messages.map((msg) => (
10091008
<React.Fragment key={msg.id}>
10101009
<div
1010+
data-testid="chat-message"
10111011
className={`${styles.message} ${
10121012
msg.role === 'user'
10131013
? styles.user
@@ -1121,9 +1121,15 @@ const SettingsModal: React.FC<{
11211121
// LLM settings
11221122
const [provider, setProvider] = useState<LLMProvider>(config?.provider || 'minimax');
11231123
const [apiKey, setApiKey] = useState(config?.apiKey || '');
1124-
const [baseUrl, setBaseUrl] = useState(config?.baseUrl || getDefaultConfig('minimax').baseUrl);
1125-
const [model, setModel] = useState(config?.model || getDefaultConfig('minimax').model);
1124+
const [baseUrl, setBaseUrl] = useState(
1125+
config?.baseUrl || getDefaultProviderConfig('minimax').baseUrl,
1126+
);
1127+
const [model, setModel] = useState(config?.model || getDefaultProviderConfig('minimax').model);
11261128
const [customHeaders, setCustomHeaders] = useState(config?.customHeaders || '');
1129+
const [manualModelMode, setManualModelMode] = useState(false);
1130+
1131+
const isPresetModel = PROVIDER_MODELS[provider]?.includes(model) ?? false;
1132+
const showDropdown = !manualModelMode && isPresetModel;
11271133

11281134
// Image gen settings
11291135
const [igProvider, setIgProvider] = useState<ImageGenProvider>(
@@ -1140,9 +1146,15 @@ const SettingsModal: React.FC<{
11401146

11411147
const handleProviderChange = (p: LLMProvider) => {
11421148
setProvider(p);
1143-
const defaults = getDefaultConfig(p);
1149+
const defaults = getDefaultProviderConfig(p);
11441150
setBaseUrl(defaults.baseUrl);
11451151
setModel(defaults.model);
1152+
setManualModelMode(false);
1153+
};
1154+
1155+
const handleModelChange = (newModel: string) => {
1156+
setModel(newModel);
1157+
setManualModelMode(false);
11461158
};
11471159

11481160
const handleIgProviderChange = (p: ImageGenProvider) => {
@@ -1168,6 +1180,8 @@ const SettingsModal: React.FC<{
11681180
<option value="anthropic">Anthropic</option>
11691181
<option value="deepseek">DeepSeek</option>
11701182
<option value="minimax">MiniMax</option>
1183+
<option value="z.ai">Z.ai</option>
1184+
<option value="kimi">Kimi</option>
11711185
</select>
11721186
</div>
11731187

@@ -1193,11 +1207,50 @@ const SettingsModal: React.FC<{
11931207

11941208
<div className={styles.field}>
11951209
<label className={styles.label}>Model</label>
1196-
<input
1197-
className={styles.fieldInput}
1198-
value={model}
1199-
onChange={(e) => setModel(e.target.value)}
1200-
/>
1210+
<div className={styles.modelSelectorWrapper}>
1211+
{showDropdown ? (
1212+
<>
1213+
<select
1214+
className={styles.select}
1215+
value={model}
1216+
onChange={(e) => handleModelChange(e.target.value)}
1217+
>
1218+
{PROVIDER_MODELS[provider]?.map((m) => (
1219+
<option key={m} value={m}>
1220+
{m}
1221+
</option>
1222+
))}
1223+
</select>
1224+
<button
1225+
type="button"
1226+
onClick={() => setManualModelMode(true)}
1227+
className={styles.manualToggleBtn}
1228+
title="Enter custom model name"
1229+
>
1230+
<Pencil size={14} />
1231+
</button>
1232+
</>
1233+
) : (
1234+
<>
1235+
<input
1236+
className={styles.fieldInput}
1237+
value={model}
1238+
onChange={(e) => setModel(e.target.value)}
1239+
placeholder="e.g. gpt-4-turbo"
1240+
/>
1241+
{isPresetModel && (
1242+
<button
1243+
type="button"
1244+
onClick={() => setManualModelMode(false)}
1245+
className={styles.manualToggleBtn}
1246+
title="Back to model list"
1247+
>
1248+
<List size={14} />
1249+
</button>
1250+
)}
1251+
</>
1252+
)}
1253+
</div>
12011254
</div>
12021255

12031256
<div className={styles.field}>

apps/webuiapps/src/lib/__tests__/chatHistoryStorage.test.ts

Lines changed: 32 additions & 84 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,11 @@ import type { ChatMessage } from '../llmClient';
55
const fetchMock = vi.fn();
66
vi.stubGlobal('fetch', fetchMock);
77

8-
const STORAGE_KEY = 'webuiapps-chat-history';
8+
const SESSION_PATH = 'char-1/mod-1';
9+
10+
function expectedUrl(file: string): string {
11+
return `/api/session-data?path=${encodeURIComponent(`${SESSION_PATH}/chat/${file}`)}`;
12+
}
913

1014
const sampleMessages: DisplayMessage[] = [
1115
{ id: '1', role: 'user', content: 'Hello' },
@@ -24,157 +28,101 @@ function makeSavedData(msgs = sampleMessages, history = sampleChatHistory): Chat
2428
describe('chatHistoryStorage', () => {
2529
beforeEach(() => {
2630
fetchMock.mockReset();
27-
localStorage.clear();
2831
vi.resetModules();
2932
});
3033

31-
// ============ loadChatHistorySync ============
32-
3334
describe('loadChatHistorySync', () => {
34-
it('returns null when localStorage is empty', async () => {
35-
const { loadChatHistorySync } = await import('../chatHistoryStorage');
36-
expect(loadChatHistorySync()).toBeNull();
37-
});
38-
39-
it('returns data from localStorage', async () => {
40-
const data = makeSavedData();
41-
localStorage.setItem(STORAGE_KEY, JSON.stringify(data));
42-
const { loadChatHistorySync } = await import('../chatHistoryStorage');
43-
const result = loadChatHistorySync();
44-
expect(result).not.toBeNull();
45-
expect(result!.messages).toHaveLength(2);
46-
expect(result!.chatHistory).toHaveLength(2);
47-
expect(result!.version).toBe(1);
48-
});
49-
50-
it('returns null for invalid JSON', async () => {
51-
localStorage.setItem(STORAGE_KEY, 'not-json');
52-
const { loadChatHistorySync } = await import('../chatHistoryStorage');
53-
expect(loadChatHistorySync()).toBeNull();
54-
});
55-
56-
it('returns null for wrong version', async () => {
57-
localStorage.setItem(
58-
STORAGE_KEY,
59-
JSON.stringify({ version: 99, savedAt: 0, messages: [], chatHistory: [] }),
60-
);
35+
it('returns null', async () => {
6136
const { loadChatHistorySync } = await import('../chatHistoryStorage');
62-
expect(loadChatHistorySync()).toBeNull();
37+
expect(loadChatHistorySync(SESSION_PATH)).toBeNull();
6338
});
6439
});
6540

66-
// ============ loadChatHistory (async) ============
67-
6841
describe('loadChatHistory', () => {
69-
it('loads from API and syncs to localStorage', async () => {
42+
it('loads from API', async () => {
7043
const data = makeSavedData();
7144
fetchMock.mockResolvedValueOnce({
7245
ok: true,
7346
json: () => Promise.resolve(data),
7447
});
7548
const { loadChatHistory } = await import('../chatHistoryStorage');
7649

77-
const result = await loadChatHistory();
50+
const result = await loadChatHistory(SESSION_PATH);
7851

79-
expect(fetchMock).toHaveBeenCalledWith('/api/chat-history');
52+
expect(fetchMock).toHaveBeenCalledWith(expectedUrl('chat.json'));
8053
expect(result).not.toBeNull();
8154
expect(result!.messages).toEqual(sampleMessages);
82-
// Verify synced to localStorage
83-
const stored = JSON.parse(localStorage.getItem(STORAGE_KEY)!);
84-
expect(stored.version).toBe(1);
8555
});
8656

87-
it('falls back to localStorage when API returns non-ok', async () => {
88-
const data = makeSavedData();
89-
localStorage.setItem(STORAGE_KEY, JSON.stringify(data));
57+
it('returns null when API returns non-ok', async () => {
9058
fetchMock.mockResolvedValueOnce({ ok: false, status: 404 });
9159
const { loadChatHistory } = await import('../chatHistoryStorage');
9260

93-
const result = await loadChatHistory();
61+
const result = await loadChatHistory(SESSION_PATH);
9462

95-
expect(result).not.toBeNull();
96-
expect(result!.messages).toEqual(sampleMessages);
63+
expect(result).toBeNull();
9764
});
9865

99-
it('falls back to localStorage when fetch throws', async () => {
100-
const data = makeSavedData();
101-
localStorage.setItem(STORAGE_KEY, JSON.stringify(data));
66+
it('returns null when fetch throws', async () => {
10267
fetchMock.mockRejectedValueOnce(new Error('network error'));
10368
const { loadChatHistory } = await import('../chatHistoryStorage');
10469

105-
const result = await loadChatHistory();
70+
const result = await loadChatHistory(SESSION_PATH);
10671

107-
expect(result).not.toBeNull();
108-
expect(result!.messages).toEqual(sampleMessages);
72+
expect(result).toBeNull();
10973
});
11074

111-
it('returns null when both API and localStorage are empty', async () => {
75+
it('returns null when API is empty', async () => {
11276
fetchMock.mockResolvedValueOnce({ ok: false, status: 404 });
11377
const { loadChatHistory } = await import('../chatHistoryStorage');
11478

115-
const result = await loadChatHistory();
79+
const result = await loadChatHistory(SESSION_PATH);
11680
expect(result).toBeNull();
11781
});
11882
});
11983

120-
// ============ saveChatHistory ============
121-
12284
describe('saveChatHistory', () => {
123-
it('saves to localStorage and POSTs to API', async () => {
85+
it('POSTs to API with expected payload', async () => {
12486
fetchMock.mockResolvedValueOnce({ ok: true });
12587
const { saveChatHistory } = await import('../chatHistoryStorage');
12688

127-
await saveChatHistory(sampleMessages, sampleChatHistory);
128-
129-
// Check localStorage
130-
const stored = JSON.parse(localStorage.getItem(STORAGE_KEY)!);
131-
expect(stored.version).toBe(1);
132-
expect(stored.messages).toEqual(sampleMessages);
133-
expect(stored.chatHistory).toEqual(sampleChatHistory);
134-
expect(typeof stored.savedAt).toBe('number');
89+
await saveChatHistory(SESSION_PATH, sampleMessages, sampleChatHistory);
13590

136-
// Check fetch call
13791
expect(fetchMock).toHaveBeenCalledOnce();
13892
const [url, options] = fetchMock.mock.calls[0];
139-
expect(url).toBe('/api/chat-history');
93+
expect(url).toBe(expectedUrl('chat.json'));
14094
expect(options.method).toBe('POST');
14195
const body = JSON.parse(options.body);
14296
expect(body.version).toBe(1);
97+
expect(body.messages).toEqual(sampleMessages);
98+
expect(body.chatHistory).toEqual(sampleChatHistory);
14399
});
144100

145-
it('saves to localStorage even when fetch fails', async () => {
101+
it('does not throw when fetch fails', async () => {
146102
fetchMock.mockRejectedValueOnce(new Error('network error'));
147103
const { saveChatHistory } = await import('../chatHistoryStorage');
148104

149-
await saveChatHistory(sampleMessages, sampleChatHistory);
150-
151-
const stored = JSON.parse(localStorage.getItem(STORAGE_KEY)!);
152-
expect(stored.messages).toEqual(sampleMessages);
105+
await expect(
106+
saveChatHistory(SESSION_PATH, sampleMessages, sampleChatHistory),
107+
).resolves.toBeUndefined();
153108
});
154109
});
155110

156-
// ============ clearChatHistory ============
157-
158111
describe('clearChatHistory', () => {
159-
it('removes from localStorage and sends DELETE to API', async () => {
160-
localStorage.setItem(STORAGE_KEY, JSON.stringify(makeSavedData()));
112+
it('sends DELETE to API', async () => {
161113
fetchMock.mockResolvedValueOnce({ ok: true });
162114
const { clearChatHistory } = await import('../chatHistoryStorage');
163115

164-
await clearChatHistory();
116+
await clearChatHistory(SESSION_PATH);
165117

166-
expect(localStorage.getItem(STORAGE_KEY)).toBeNull();
167-
expect(fetchMock).toHaveBeenCalledWith('/api/chat-history', { method: 'DELETE' });
118+
expect(fetchMock).toHaveBeenCalledWith(expectedUrl('chat.json'), { method: 'DELETE' });
168119
});
169120

170-
it('clears localStorage even when DELETE fetch fails', async () => {
171-
localStorage.setItem(STORAGE_KEY, JSON.stringify(makeSavedData()));
121+
it('does not throw when DELETE fetch fails', async () => {
172122
fetchMock.mockRejectedValueOnce(new Error('network error'));
173123
const { clearChatHistory } = await import('../chatHistoryStorage');
174124

175-
await clearChatHistory();
176-
177-
expect(localStorage.getItem(STORAGE_KEY)).toBeNull();
125+
await expect(clearChatHistory(SESSION_PATH)).resolves.toBeUndefined();
178126
});
179127
});
180128
});

apps/webuiapps/src/lib/__tests__/configPersistence.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -10,7 +10,7 @@ import {
1010
savePersistedConfig,
1111
type PersistedConfig,
1212
} from '../configPersistence';
13-
import type { LLMConfig } from '../llmClient';
13+
import type { LLMConfig } from '../llmModels';
1414
import type { ImageGenConfig } from '../imageGenClient';
1515

1616
// ─── Constants ──────────────────────────────────────────────────────────────────

0 commit comments

Comments (0)