Skip to content

Commit 40c53e3

Browse files
authored
Merge pull request #47 from zhalice2011/fix/openai-compatible-proxy-response
fix: handle non-standard responses from OpenAI-compatible API proxies
2 parents 1d979fa + 584805c commit 40c53e3

File tree

1 file changed

+28
-4
lines changed

1 file changed

+28
-4
lines changed

codewiki/src/be/llm_services.py

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,11 @@
11
"""
22
LLM service factory for creating configured LLM clients.
3+
4+
Includes a compatibility layer for OpenAI-compatible API proxies that may
5+
return slightly non-standard responses (e.g. choices[].index = None).
36
"""
7+
import logging
8+
from openai.types import chat
49
from pydantic_ai.models.openai import OpenAIModel
510
from pydantic_ai.providers.openai import OpenAIProvider
611
from pydantic_ai.models.openai import OpenAIModelSettings
@@ -9,10 +14,29 @@
914

1015
from codewiki.src.config import Config
1116

17+
logger = logging.getLogger(__name__)
18+
19+
20+
class CompatibleOpenAIModel(OpenAIModel):
    """OpenAIModel variant tolerant of lax OpenAI-compatible proxies.

    Certain proxy servers emit responses in which fields such as
    ``choices[].index`` are ``None`` rather than an integer. This
    subclass normalizes those fields before pydantic validation runs,
    so otherwise-valid responses are not rejected.
    """

    def _validate_completion(self, response: chat.ChatCompletion) -> chat.ChatCompletion:
        # A missing (None) choice index is replaced with the choice's
        # position in the list, restoring the standard 0, 1, 2, ... order.
        for position, entry in enumerate(response.choices or []):
            if entry.index is None:
                entry.index = position
        return super()._validate_completion(response)
35+
1236

13-
def create_main_model(config: Config) -> OpenAIModel:
37+
def create_main_model(config: Config) -> CompatibleOpenAIModel:
1438
"""Create the main LLM model from configuration."""
15-
return OpenAIModel(
39+
return CompatibleOpenAIModel(
1640
model_name=config.main_model,
1741
provider=OpenAIProvider(
1842
base_url=config.llm_base_url,
@@ -25,9 +49,9 @@ def create_main_model(config: Config) -> OpenAIModel:
2549
)
2650

2751

28-
def create_fallback_model(config: Config) -> OpenAIModel:
52+
def create_fallback_model(config: Config) -> CompatibleOpenAIModel:
2953
"""Create the fallback LLM model from configuration."""
30-
return OpenAIModel(
54+
return CompatibleOpenAIModel(
3155
model_name=config.fallback_model,
3256
provider=OpenAIProvider(
3357
base_url=config.llm_base_url,

0 commit comments

Comments
 (0)