"""
LLM service factory for creating configured LLM clients.

Includes a compatibility layer for OpenAI-compatible API proxies that may
return slightly non-standard responses (e.g. choices[].index = None).
"""
7+ import logging
8+ from openai .types import chat
49from pydantic_ai .models .openai import OpenAIModel
510from pydantic_ai .providers .openai import OpenAIProvider
611from pydantic_ai .models .openai import OpenAIModelSettings
914
1015from codewiki .src .config import Config
1116
17+ logger = logging .getLogger (__name__ )
18+
19+
class CompatibleOpenAIModel(OpenAIModel):
    """OpenAIModel variant tolerant of non-standard OpenAI-compatible proxies.

    Certain proxy servers that implement the OpenAI chat API return
    responses in which ``choices[].index`` is ``None`` rather than an
    integer, which breaks pydantic validation downstream. This subclass
    repairs such fields before handing the response to the parent class.
    """

    def _validate_completion(self, response: chat.ChatCompletion) -> chat.ChatCompletion:
        """Backfill missing ``choices[].index`` values, then delegate to super().

        A ``None`` index is replaced with the choice's position in the
        list (0, 1, 2, ...), matching what a conforming server would send.
        """
        # NOTE(review): assumes the parent OpenAIModel defines
        # _validate_completion as the pre-validation hook — confirm
        # against the installed pydantic_ai version.
        for position, choice in enumerate(response.choices or []):
            if choice.index is None:
                choice.index = position
        return super()._validate_completion(response)
1236
13- def create_main_model (config : Config ) -> OpenAIModel :
37+ def create_main_model (config : Config ) -> CompatibleOpenAIModel :
1438 """Create the main LLM model from configuration."""
15- return OpenAIModel (
39+ return CompatibleOpenAIModel (
1640 model_name = config .main_model ,
1741 provider = OpenAIProvider (
1842 base_url = config .llm_base_url ,
@@ -25,9 +49,9 @@ def create_main_model(config: Config) -> OpenAIModel:
2549 )
2650
2751
28- def create_fallback_model (config : Config ) -> OpenAIModel :
52+ def create_fallback_model (config : Config ) -> CompatibleOpenAIModel :
2953 """Create the fallback LLM model from configuration."""
30- return OpenAIModel (
54+ return CompatibleOpenAIModel (
3155 model_name = config .fallback_model ,
3256 provider = OpenAIProvider (
3357 base_url = config .llm_base_url ,
0 commit comments