2525
2626_UNSET : Final [Any ] = object ()
2727DEFAULT_TIMEOUT_SECONDS : Final [float ] = 300.0
28+ DEFAULT_MAX_TOKENS : Final [int ] = 1000
29+ DEFAULT_TEMPERATURE : Final [float ] = 0.0
30+ DEFAULT_MAX_RETRIES : Final [int ] = 3
2831
2932
3033def get_chat_model (
@@ -36,10 +39,10 @@ def get_chat_model(
     vendor_type: VendorType | str | None = None,
     api_flavor: ApiFlavor | str | None = None,
     custom_class: type[UiPathBaseChatModel] | None = None,
-    temperature: float | None = _UNSET,
-    max_tokens: int | None = _UNSET,
+    temperature: float | None = DEFAULT_TEMPERATURE,
+    max_tokens: int | None = DEFAULT_MAX_TOKENS,
     timeout: float | None = DEFAULT_TIMEOUT_SECONDS,
-    max_retries: int | None = _UNSET,
+    max_retries: int | None = DEFAULT_MAX_RETRIES,
     callbacks: Callbacks = _UNSET,
     # Legacy-only arguments
     agenthub_config: str | None = None,
@@ -58,10 +61,14 @@ def get_chat_model(
             Converse). Auto-detected when omitted.
         custom_class: Custom ``UiPathBaseChatModel`` subclass to instantiate
             instead of the auto-detected one.
-        temperature: Sampling temperature. Forwarded only when explicitly set.
-        max_tokens: Maximum output tokens. Forwarded only when explicitly set.
+        temperature: Sampling temperature. Defaults to 0.0. Pass ``None`` to
+            omit the parameter when the underlying client supports it.
+        max_tokens: Maximum output tokens. Defaults to 1000 to match the
+            historical default from ``UiPathRequestMixin``. Pass ``None`` to
+            forward an explicit unset value (lets the underlying client apply
+            its own default or use no limit).
         timeout: Request timeout in seconds. Defaults to 300 seconds.
-        max_retries: Max retry count. Forwarded only when explicitly set.
+        max_retries: Max retry count. Defaults to 3.
         callbacks: LangChain callbacks (handlers or a manager) attached to the
             returned chat model. Accepts ``list[BaseCallbackHandler]`` or a
             ``BaseCallbackManager``. Forwarded only when explicitly set.
@@ -131,8 +138,8 @@ def _legacy_chat_model(

     return _legacy_get_chat_model(
         model,
-        temperature if temperature is not _UNSET and temperature is not None else 0.0,
-        max_tokens if max_tokens is not _UNSET and max_tokens is not None else 0,
+        temperature,
+        max_tokens,
         agenthub_config,
         byo_connection_id,
         **kwargs,
0 commit comments