|
34 | 34 | from .model_settings import ModelSettings |
35 | 35 | from .models.default_models import ( |
36 | 36 | get_default_model_settings, |
37 | | - gpt_5_reasoning_settings_required, |
38 | | - is_gpt_5_default, |
39 | 37 | ) |
40 | 38 | from .models.interface import Model |
41 | 39 | from .prompts import DynamicPromptFunction, Prompt, PromptUtil |
@@ -153,6 +151,20 @@ class MCPConfig(TypedDict): |
153 | 151 | """ |
154 | 152 |
|
155 | 153 |
|
def _initial_model_settings_for_model(model: str | Model | None) -> ModelSettings:
    """Return the ModelSettings an agent implicitly starts with for *model*.

    A string model name resolves to that model's defaults, an unset model
    resolves to the global defaults, and a concrete ``Model`` instance gets
    plain ``ModelSettings()`` because its defaults cannot be looked up by name.
    """
    if isinstance(model, str):
        # Named model: look up per-model defaults.
        return get_default_model_settings(model)
    if model is None:
        # No model chosen yet: use the SDK-wide defaults.
        return get_default_model_settings()
    # A Model instance — presumably an arbitrary implementation, so fall
    # back to generic, uncustomized settings.
    return ModelSettings()
| 160 | + |
| 161 | + |
def _model_settings_match_implicit_model_defaults(
    model: str | Model | None, model_settings: ModelSettings
) -> bool:
    """Report whether *model_settings* still equals the implicit defaults for *model*.

    Used to detect that the caller never customized the settings, so they can
    be safely swapped when the model changes (e.g. in ``Agent.clone``).
    """
    implicit_defaults = _initial_model_settings_for_model(model)
    return implicit_defaults == model_settings
| 166 | + |
| 167 | + |
156 | 168 | @dataclass |
157 | 169 | class AgentBase(Generic[TContext]): |
158 | 170 | """Base class for `Agent` and `RealtimeAgent`.""" |
@@ -265,7 +277,7 @@ class Agent(AgentBase, Generic[TContext]): |
265 | 277 | """The model implementation to use when invoking the LLM. |
266 | 278 |
|
267 | 279 | By default, if not set, the agent will use the default model configured in |
268 | | - `agents.models.get_default_model()` (currently "gpt-4.1"). |
| 280 | + `agents.models.get_default_model()` (currently "gpt-5.4-mini"). |
269 | 281 | """ |
270 | 282 |
|
271 | 283 | model_settings: ModelSettings = field(default_factory=get_default_model_settings) |
@@ -383,25 +395,8 @@ def __post_init__(self): |
383 | 395 | f"got {type(self.model_settings).__name__}" |
384 | 396 | ) |
385 | 397 |
|
386 | | - if ( |
387 | | - # The user sets a non-default model |
388 | | - self.model is not None |
389 | | - and ( |
390 | | - # The default model is gpt-5 |
391 | | - is_gpt_5_default() is True |
392 | | - # However, the specified model is not a gpt-5 model |
393 | | - and ( |
394 | | - isinstance(self.model, str) is False |
395 | | - or gpt_5_reasoning_settings_required(self.model) is False # type: ignore |
396 | | - ) |
397 | | - # The model settings are not customized for the specified model |
398 | | - and self.model_settings == get_default_model_settings() |
399 | | - ) |
400 | | - ): |
401 | | - # In this scenario, we should use a generic model settings |
402 | | - # because non-gpt-5 models are not compatible with the default gpt-5 model settings. |
403 | | - # This is a best-effort attempt to make the agent work with non-gpt-5 models. |
404 | | - self.model_settings = ModelSettings() |
| 398 | + if self.model is not None and self.model_settings == get_default_model_settings(): |
| 399 | + self.model_settings = _initial_model_settings_for_model(self.model) |
405 | 400 |
|
406 | 401 | if not isinstance(self.input_guardrails, list): |
407 | 402 | raise TypeError( |
@@ -467,6 +462,12 @@ def clone(self, **kwargs: Any) -> Agent[TContext]: |
467 | 462 | new_agent = agent.clone(instructions="New instructions") |
468 | 463 | ``` |
469 | 464 | """ |
| 465 | + if ( |
| 466 | + "model" in kwargs |
| 467 | + and "model_settings" not in kwargs |
| 468 | + and _model_settings_match_implicit_model_defaults(self.model, self.model_settings) |
| 469 | + ): |
| 470 | + kwargs["model_settings"] = _initial_model_settings_for_model(kwargs["model"]) |
470 | 471 | return dataclasses.replace(self, **kwargs) |
471 | 472 |
|
472 | 473 | def as_tool( |
|
0 commit comments