Skip to content

Commit bbf2e25

Browse files
cosminacho and claude authored
feat: chat model factory defaults + omit strict_mode=False from payload (#808)
Co-authored-by: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
1 parent 5e8b146 commit bbf2e25

8 files changed

Lines changed: 32 additions & 23 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.10.4"
3+
version = "0.10.5"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

src/uipath_langchain/chat/_legacy/chat_model_factory.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -40,7 +40,7 @@ def _create_openai_llm(
4040
model: str,
4141
api_flavor: APIFlavor,
4242
temperature: float | None,
43-
max_tokens: int,
43+
max_tokens: int | None,
4444
agenthub_config: str,
4545
byo_connection_id: str | None = None,
4646
**kwargs: Any,
@@ -87,7 +87,7 @@ def _create_bedrock_llm(
8787
model: str,
8888
api_flavor: APIFlavor,
8989
temperature: float | None,
90-
max_tokens: int,
90+
max_tokens: int | None,
9191
agenthub_config: str,
9292
byo_connection_id: str | None = None,
9393
**kwargs: Any,
@@ -244,8 +244,8 @@ def _get_model_info(
244244

245245
def get_chat_model(
246246
model: str,
247-
temperature: float,
248-
max_tokens: int,
247+
temperature: float | None,
248+
max_tokens: int | None,
249249
agenthub_config: str,
250250
byo_connection_id: str | None = None,
251251
**kwargs: Any,

src/uipath_langchain/chat/chat_model_factory.py

Lines changed: 15 additions & 8 deletions
Original file line number | Diff line number | Diff line change
@@ -25,6 +25,9 @@
2525

2626
_UNSET: Final[Any] = object()
2727
DEFAULT_TIMEOUT_SECONDS: Final[float] = 300.0
28+
DEFAULT_MAX_TOKENS: Final[int] = 1000
29+
DEFAULT_TEMPERATURE: Final[float] = 0.0
30+
DEFAULT_MAX_RETRIES: Final[int] = 3
2831

2932

3033
def get_chat_model(
@@ -36,10 +39,10 @@ def get_chat_model(
3639
vendor_type: VendorType | str | None = None,
3740
api_flavor: ApiFlavor | str | None = None,
3841
custom_class: type[UiPathBaseChatModel] | None = None,
39-
temperature: float | None = _UNSET,
40-
max_tokens: int | None = _UNSET,
42+
temperature: float | None = DEFAULT_TEMPERATURE,
43+
max_tokens: int | None = DEFAULT_MAX_TOKENS,
4144
timeout: float | None = DEFAULT_TIMEOUT_SECONDS,
42-
max_retries: int | None = _UNSET,
45+
max_retries: int | None = DEFAULT_MAX_RETRIES,
4346
callbacks: Callbacks = _UNSET,
4447
# Legacy-only arguments
4548
agenthub_config: str | None = None,
@@ -58,10 +61,14 @@ def get_chat_model(
5861
Converse). Auto-detected when omitted.
5962
custom_class: Custom ``UiPathBaseChatModel`` subclass to instantiate
6063
instead of the auto-detected one.
61-
temperature: Sampling temperature. Forwarded only when explicitly set.
62-
max_tokens: Maximum output tokens. Forwarded only when explicitly set.
64+
temperature: Sampling temperature. Defaults to 0.0. Pass ``None`` to
65+
omit the parameter when the underlying client supports it.
66+
max_tokens: Maximum output tokens. Defaults to 1000 to match the
67+
historical default from ``UiPathRequestMixin``. Pass ``None`` to
68+
forward an explicit unset value (lets the underlying client apply
69+
its own default or use no limit).
6370
timeout: Request timeout in seconds. Defaults to 300 seconds.
64-
max_retries: Max retry count. Forwarded only when explicitly set.
71+
max_retries: Max retry count. Defaults to 3.
6572
callbacks: LangChain callbacks (handlers or a manager) attached to the
6673
returned chat model. Accepts ``list[BaseCallbackHandler]`` or a
6774
``BaseCallbackManager``. Forwarded only when explicitly set.
@@ -131,8 +138,8 @@ def _legacy_chat_model(
131138

132139
return _legacy_get_chat_model(
133140
model,
134-
temperature if temperature is not _UNSET and temperature is not None else 0.0,
135-
max_tokens if max_tokens is not _UNSET and max_tokens is not None else 0,
141+
temperature,
142+
max_tokens,
136143
agenthub_config,
137144
byo_connection_id,
138145
**kwargs,

src/uipath_langchain/chat/handlers/anthropic.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -48,8 +48,8 @@ def get_tool_binding_kwargs(
4848
kwargs: dict[str, Any] = {"tool_choice": tool_choice}
4949
if parallel_tool_calls is not None:
5050
kwargs["parallel_tool_calls"] = parallel_tool_calls
51-
if strict_mode is not None:
52-
kwargs["strict"] = strict_mode
51+
if strict_mode is True:
52+
kwargs["strict"] = True
5353
return kwargs
5454

5555
def check_stop_reason(self, response: AIMessage) -> None:

src/uipath_langchain/chat/handlers/fireworks.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -18,8 +18,8 @@ def get_tool_binding_kwargs(
1818
strict_mode: bool | None = None,
1919
) -> dict[str, Any]:
2020
kwargs: dict[str, Any] = {"tool_choice": tool_choice}
21-
if strict_mode is not None:
22-
kwargs["strict"] = strict_mode
21+
if strict_mode is True:
22+
kwargs["strict"] = True
2323
return kwargs
2424

2525
def check_stop_reason(self, response: Any) -> None:

src/uipath_langchain/chat/handlers/openai.py

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -73,8 +73,8 @@ def get_tool_binding_kwargs(
7373
kwargs: dict[str, Any] = {"tool_choice": tool_choice}
7474
if parallel_tool_calls is not None:
7575
kwargs["parallel_tool_calls"] = parallel_tool_calls
76-
if strict_mode is not None:
77-
kwargs["strict"] = strict_mode
76+
if strict_mode is True:
77+
kwargs["strict"] = True
7878
return kwargs
7979

8080
def check_stop_reason(self, response: AIMessage) -> None:

tests/chat/handlers/test_tool_binding_kwargs.py

Lines changed: 5 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -103,10 +103,11 @@ def test_strict_mode_true(self):
103103
assert result["strict"] is True
104104

105105
def test_strict_mode_false(self):
106+
"""strict_mode=False must be omitted from the request payload."""
106107
result = self.handler.get_tool_binding_kwargs(
107108
tools=self.tools, tool_choice="auto", strict_mode=False
108109
)
109-
assert result["strict"] is False
110+
assert "strict" not in result
110111

111112
def test_all_keys_present(self):
112113
result = self.handler.get_tool_binding_kwargs(
@@ -161,17 +162,18 @@ def test_strict_mode_true(self):
161162
assert result["strict"] is True
162163

163164
def test_strict_mode_false(self):
165+
"""strict_mode=False must be omitted from the request payload."""
164166
result = self.handler.get_tool_binding_kwargs(
165167
tools=self.tools, tool_choice="auto", strict_mode=False
166168
)
167-
assert result["strict"] is False
169+
assert "strict" not in result
168170

169171
def test_all_keys_present(self):
170172
result = self.handler.get_tool_binding_kwargs(
171173
tools=self.tools,
172174
tool_choice="any",
173175
parallel_tool_calls=True,
174-
strict_mode=False,
176+
strict_mode=True,
175177
)
176178
assert set(result.keys()) == {"tool_choice", "parallel_tool_calls", "strict"}
177179

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)