Skip to content

Commit 02a8eb1

Browse files
committed
fix: address Copilot review findings across security, a11y, and UX
PR MiniMax-AI#33 and MiniMax-AI#34 review feedback:

Security:
- Config file now written with mode 0600, directory with mode 0700
- Block hop-by-hop headers (RFC 9110) in proxy passthrough
- Validate header values are strings, not arrays, before use
- Fix misleading comment about symlink resolution in sanitizeRelativePath
- Redact sensitive data from file tool call summaries in chat history

UX / Correctness:
- Thread AbortSignal through chat() to fetch() for real request cancellation
- SettingsModal trims all inputs on save to prevent whitespace issues
- onSave returns proper error feedback when config reload fails
- hasUsableLLMConfig normalizes config in-place (trimmed baseUrl/model)
- Whitespace-only image gen API key no longer treated as enabled

Accessibility:
- ErrorBoundary: add role=alert, aria-live=assertive, type=button

Race conditions:
- handleMediaReady ignores stale loads when user switches emotions quickly

Tests: 118 passing
1 parent 1ce893d commit 02a8eb1

8 files changed

Lines changed: 96 additions & 22 deletions

File tree

apps/webuiapps/src/components/ChatPanel/ChatSubComponents.tsx

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -142,7 +142,13 @@ export const CharacterAvatar: React.FC<{
142142
});
143143
}, [media?.url, activeUrl]);
144144

145+
const desiredUrlRef = useRef<string | undefined>(media?.url);
146+
desiredUrlRef.current = media?.url;
147+
145148
const handleMediaReady = useCallback((readyUrl: string) => {
149+
// Ignore stale loads — if the user has since switched to a different emotion,
150+
// don't activate the old layer
151+
if (desiredUrlRef.current && readyUrl !== desiredUrlRef.current) return;
146152
setLayers((prev) => {
147153
const staleUrls = prev.filter((l) => l.url !== readyUrl).map((l) => l.url);
148154
if (cleanupRef.current) clearTimeout(cleanupRef.current);

apps/webuiapps/src/components/ChatPanel/SettingsModal.tsx

Lines changed: 19 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -344,20 +344,33 @@ const SettingsModal: React.FC<SettingsModalProps> = ({
344344
setSaving(true);
345345
const llmCfg: LLMConfigUpdate = {
346346
provider,
347-
baseUrl,
348-
model,
349-
customHeaders,
347+
baseUrl: baseUrl.trim(),
348+
model: model.trim(),
349+
customHeaders: customHeaders.trim(),
350350
};
351351
if (apiKeyDirty) {
352-
llmCfg.apiKey = apiKey;
352+
llmCfg.apiKey = apiKey.trim();
353353
} else if (llmEndpointChanged) {
354354
llmCfg.apiKey = '';
355355
}
356356
const nextImageGenConfig = buildImageGenConfig();
357357
const igCfg: ImageGenConfigUpdate | null = imageGenConfig
358-
? nextImageGenConfig
358+
? {
359+
...nextImageGenConfig,
360+
baseUrl: nextImageGenConfig.baseUrl.trim(),
361+
model: nextImageGenConfig.model.trim(),
362+
customHeaders: nextImageGenConfig.customHeaders?.trim(),
363+
...(nextImageGenConfig.apiKey
364+
? { apiKey: nextImageGenConfig.apiKey.trim() }
365+
: {}),
366+
}
359367
: igApiKey.trim()
360-
? { ...nextImageGenConfig, apiKey: igApiKey }
368+
? {
369+
...nextImageGenConfig,
370+
apiKey: igApiKey.trim(),
371+
baseUrl: nextImageGenConfig.baseUrl.trim(),
372+
model: nextImageGenConfig.model.trim(),
373+
}
361374
: null;
362375
try {
363376
const result = await onSave(llmCfg, igCfg);

apps/webuiapps/src/components/ChatPanel/index.tsx

Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -689,6 +689,17 @@ const ChatPanel: React.FC<{
689689
setShowSettings(false);
690690
}
691691

692+
if (nextConfig === null || !imageGenReloadSucceeded) {
693+
const reloadFailures = [
694+
...(nextConfig === null ? ['chat settings'] : []),
695+
...(!imageGenReloadSucceeded ? ['image generation settings'] : []),
696+
];
697+
return {
698+
ok: false,
699+
error: `Settings were saved, but reloading ${reloadFailures.join(' and ')} failed. Please reopen settings to verify.`,
700+
};
701+
}
702+
692703
return { ok: true };
693704
}}
694705
onClose={() => setShowSettings(false)}

apps/webuiapps/src/components/ChatPanel/toolDefinitions.ts

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,11 @@ import type { LLMConfig } from '@/lib/llmModels';
1818
export function hasUsableLLMConfig(config: LLMConfig | null | undefined): config is LLMConfig {
1919
const baseUrl = typeof config?.baseUrl === 'string' ? config.baseUrl.trim() : '';
2020
const model = typeof config?.model === 'string' ? config.model.trim() : '';
21+
// Normalize the config in-place so downstream callers get trimmed values
22+
if (config) {
23+
config.baseUrl = baseUrl;
24+
config.model = model;
25+
}
2126
return !!baseUrl && !!model;
2227
}
2328

apps/webuiapps/src/components/ChatPanel/useConversationEngine.ts

Lines changed: 14 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ import {
1515
} from 'react';
1616
import { chat, type ChatMessage, type ToolCall } from '@/lib/llmClient';
1717
import type { LLMConfig } from '@/lib/llmModels';
18-
import { hasUsableImageGenConfig, type ImageGenConfig } from '@/lib/imageGenClient';
18+
import { type ImageGenConfig } from '@/lib/imageGenClient';
1919
import type { CharacterConfig } from '@/lib/characterManager';
2020
import { clearEmotionVideoCache } from '@/lib/characterManager';
2121
import type { MemoryEntry } from '@/lib/memoryManager';
@@ -110,7 +110,7 @@ export function useConversationEngine(deps: ConversationEngineDeps) {
110110
async (history: ChatMessage[], cfg: LLMConfig, signal?: AbortSignal) => {
111111
await seedMetaFiles();
112112
await loadActionsFromMeta();
113-
const hasImageGen = hasUsableImageGenConfig(imageGenConfigRef.current);
113+
const hasImageGen = (imageGenConfigRef.current?.apiKey?.trim().length ?? 0) > 0;
114114
const mm = modManagerRef.current;
115115
const char = characterRef.current;
116116

@@ -138,7 +138,7 @@ export function useConversationEngine(deps: ConversationEngineDeps) {
138138
while (iterations < maxIterations) {
139139
if (signal?.aborted) break;
140140
iterations++;
141-
const response = await chat(currentMessages, tools, cfg);
141+
const response = await chat(currentMessages, tools, cfg, signal);
142142

143143
if (response.toolCalls.length === 0) {
144144
// No tool calls — fallback plain text
@@ -290,9 +290,17 @@ async function executeToolCall(
290290

291291
// ---- File tools ----
292292
if (isFileTool(tc.function.name)) {
293-
ctx.pendingToolCallsRef.current.push(
294-
`${tc.function.name}(${JSON.stringify(params).slice(0, 60)})`,
295-
);
293+
// Only log tool name + path to avoid persisting sensitive file contents
294+
const pathKeys = ['path', 'filePath', 'sourcePath', 'targetPath', 'destinationPath'] as const;
295+
let summary = tc.function.name;
296+
for (const key of pathKeys) {
297+
const value = (params as Record<string, unknown>)[key];
298+
if (typeof value === 'string' && value.length > 0) {
299+
summary = `${tc.function.name}(${key}: ${value})`;
300+
break;
301+
}
302+
}
303+
ctx.pendingToolCallsRef.current.push(summary);
296304
try {
297305
const result = await executeFileTool(tc.function.name, params as Record<string, string>);
298306
return toolResult(result);

apps/webuiapps/src/components/ErrorBoundary.tsx

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -40,13 +40,18 @@ export class ErrorBoundary extends Component<Props, State> {
4040
render(): ReactNode {
4141
if (this.state.hasError) {
4242
return (
43-
<div className={styles.fallback} data-testid="error-boundary-fallback">
43+
<div
44+
className={styles.fallback}
45+
data-testid="error-boundary-fallback"
46+
role="alert"
47+
aria-live="assertive"
48+
>
4449
<div className={styles.icon}>⚠️</div>
4550
<div className={styles.message}>
4651
{this.props.name ? `${this.props.name} crashed` : 'Something went wrong'}
4752
</div>
4853
<div className={styles.detail}>{this.state.error?.message}</div>
49-
<button className={styles.retryBtn} onClick={this.handleRetry}>
54+
<button type="button" className={styles.retryBtn} onClick={this.handleRetry}>
5055
Retry
5156
</button>
5257
</div>

apps/webuiapps/src/lib/llmClient.ts

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -178,6 +178,7 @@ export async function chat(
178178
messages: ChatMessage[],
179179
tools: ToolDef[],
180180
config: LLMConfig,
181+
signal?: AbortSignal,
181182
): Promise<LLMResponse> {
182183
logger.info(
183184
'LLM',
@@ -189,15 +190,16 @@ export async function chat(
189190
messages.length,
190191
);
191192
if (config.provider === 'anthropic' || config.provider === 'minimax') {
192-
return chatAnthropic(messages, tools, config);
193+
return chatAnthropic(messages, tools, config, signal);
193194
}
194-
return chatOpenAI(messages, tools, config);
195+
return chatOpenAI(messages, tools, config, signal);
195196
}
196197

197198
async function chatOpenAI(
198199
messages: ChatMessage[],
199200
tools: ToolDef[],
200201
config: LLMConfig,
202+
signal?: AbortSignal,
201203
): Promise<LLMResponse> {
202204
const body: Record<string, unknown> = {
203205
model: config.model,
@@ -227,6 +229,7 @@ async function chatOpenAI(
227229
method: 'POST',
228230
headers,
229231
body: JSON.stringify(body),
232+
signal,
230233
});
231234

232235
logger.info('LLM', 'Response status:', res.status);
@@ -263,6 +266,7 @@ async function chatAnthropic(
263266
messages: ChatMessage[],
264267
tools: ToolDef[],
265268
config: LLMConfig,
269+
signal?: AbortSignal,
266270
): Promise<LLMResponse> {
267271
const systemMsg = messages.find((m) => m.role === 'system')?.content || '';
268272
const nonSystemMessages = messages.filter((m) => m.role !== 'system');
@@ -338,6 +342,7 @@ async function chatAnthropic(
338342
method: 'POST',
339343
headers,
340344
body: JSON.stringify(body),
345+
signal,
341346
});
342347

343348
logger.info('LLM', 'Anthropic Response status:', res.status);

apps/webuiapps/vite.config.ts

Lines changed: 27 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -149,8 +149,12 @@ function llmConfigPlugin(): Plugin {
149149
}
150150

151151
try {
152-
fs.mkdirSync(resolve(os.homedir(), '.openroom'), { recursive: true });
153-
fs.writeFileSync(LLM_CONFIG_FILE, JSON.stringify(mergedConfig), 'utf-8');
152+
const configDir = resolve(os.homedir(), '.openroom');
153+
fs.mkdirSync(configDir, { recursive: true, mode: 0o700 });
154+
fs.writeFileSync(LLM_CONFIG_FILE, JSON.stringify(mergedConfig), {
155+
encoding: 'utf-8',
156+
mode: 0o600,
157+
});
154158
const stat = fs.statSync(LLM_CONFIG_FILE);
155159
cachedServerConfig = { mtimeMs: stat.mtimeMs, value: mergedConfig };
156160
res.writeHead(200);
@@ -182,7 +186,7 @@ function sanitizeRelativePath(relPath: string, baseDir: string): string | null {
182186
const safe = cleaned.replace(/[^a-zA-Z0-9_\-./]/g, '_');
183187
// Resolve to absolute and verify it stays within baseDir
184188
const resolved = resolve(baseDir, safe);
185-
// Normalize both paths for comparison (resolve handles .. and symlinks)
189+
// Normalize both paths for comparison (resolve handles .. sequences)
186190
const normalizedBase = resolve(baseDir);
187191
if (!resolved.startsWith(normalizedBase + sep) && resolved !== normalizedBase) {
188192
return null;
@@ -506,6 +510,16 @@ function isBlockedPassthroughHeader(headerName: string): boolean {
506510
'proxy-authorization',
507511
'x-api-key',
508512
'x-goog-api-key',
513+
// Hop-by-hop headers per RFC 9110 — forwarding these can cause
514+
// request smuggling or broken upstream responses
515+
'content-length',
516+
'transfer-encoding',
517+
'content-encoding',
518+
'accept-encoding',
519+
'te',
520+
'trailer',
521+
'upgrade',
522+
'keep-alive',
509523
].includes(headerName)
510524
);
511525
}
@@ -540,7 +554,14 @@ function llmProxyPlugin(): Plugin {
540554
name: 'llm-proxy',
541555
configureServer(server) {
542556
server.middlewares.use('/api/llm-proxy', async (req, res) => {
543-
const targetUrl = req.headers['x-llm-target-url'] as string;
557+
// Normalize header values — Node http headers can be string[] for duplicates
558+
const getHeader = (name: string): string | undefined => {
559+
const val = req.headers[name];
560+
if (Array.isArray(val)) return val[0];
561+
return val;
562+
};
563+
564+
const targetUrl = getHeader('x-llm-target-url');
544565
if (!targetUrl) {
545566
res.writeHead(400, { 'Content-Type': 'application/json' });
546567
res.end(JSON.stringify({ error: 'Missing X-LLM-Target-URL header' }));
@@ -595,8 +616,8 @@ function llmProxyPlugin(): Plugin {
595616
injectServerApiKey(
596617
headers,
597618
parsedTargetUrl,
598-
req.headers['x-llm-provider'] as string,
599-
req.headers['x-llm-config-scope'] as string,
619+
getHeader('x-llm-provider'),
620+
getHeader('x-llm-config-scope'),
600621
);
601622

602623
const controller = new AbortController();

0 commit comments

Comments (0)