Skip to content

Commit 3df583d

Browse files
fix: thread agenthub_config through chat_model_factory new path
PlatformSettings.agenthub_config defaults to "agentsruntime" upstream. The local _AgentHubConfigDefaultMixin clears this default for direct chat-model construction (UiPathChat, UiPathChatOpenAI, etc.), but the factory path used by uipath-agents bypassed the mitigation entirely: get_chat_model accepted agenthub_config but its docstring said it was "ignored by the new factory" — and indeed it was, dropped on the floor before delegating to the upstream factory. Result: agents running with command="debug" computed agenthub_config="agentsplayground" via get_agenthub_config(...) and passed it to get_chat_model, but every LLM gateway request shipped X-UiPath-AgentHub-Config: agentsruntime — billing the runtime quota instead of the developer debug quota. Fix: when agenthub_config is provided on the new path, build a client_settings (via get_default_client_settings) if absent and set client_settings.agenthub_config before delegating. Caller-supplied client_settings instances are mutated rather than replaced so other fields survive. Pass-through preserved when agenthub_config is None to leave existing env-var / direct-construction paths governed by the mixin. Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
1 parent 8475f84 commit 3df583d

4 files changed

Lines changed: 137 additions & 4 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.10.18"
3+
version = "0.10.19"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

src/uipath_langchain/chat/chat_model_factory.py

Lines changed: 12 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -73,8 +73,10 @@ def get_chat_model(
7373
returned chat model. Accepts ``list[BaseCallbackHandler]`` or a
7474
``BaseCallbackManager``. Forwarded only when explicitly set.
7575
Ignored by the legacy factory.
76-
agenthub_config: AgentHub config header value. Required by the legacy
77-
factory; ignored by the new factory.
76+
agenthub_config: AgentHub config header value. Threaded onto
77+
``client_settings.agenthub_config`` on the new path (PlatformSettings
78+
otherwise defaults to ``"agentsruntime"``); required by the legacy
79+
factory.
7880
use_new_llm_clients: Routes to the new ``uipath_langchain_client``
7981
factory when True (default). When False, routes to the legacy
8082
in-repo clients.
@@ -95,6 +97,14 @@ def get_chat_model(
9597
**kwargs,
9698
)
9799

100+
if agenthub_config is not None:
101+
if client_settings is None:
102+
from uipath_langchain_client.settings import get_default_client_settings
103+
104+
client_settings = get_default_client_settings()
105+
if hasattr(client_settings, "agenthub_config"):
106+
client_settings.agenthub_config = agenthub_config
107+
98108
optional_kwargs = {
99109
k: v
100110
for k, v in {
Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
"""Tests for chat_model_factory.get_chat_model agenthub_config plumbing.
2+
3+
The new-LLM-clients path must thread agenthub_config onto client_settings
4+
before delegating to the upstream factory. PlatformSettings.agenthub_config
5+
defaults to "agentsruntime", so without explicit threading, the carefully
6+
computed value at the call site never reaches the wire.
7+
"""
8+
9+
from types import SimpleNamespace
10+
11+
import pytest
12+
13+
from uipath_langchain.chat import chat_model_factory
14+
15+
_FAKE_JWT = (
16+
"eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9."
17+
"eyJzdWIiOiAidGVzdCIsICJpc3MiOiAidGVzdCJ9."
18+
"signature"
19+
)
20+
21+
22+
@pytest.fixture(autouse=True)
23+
def _platform_env(monkeypatch):
24+
monkeypatch.setenv("UIPATH_ACCESS_TOKEN", _FAKE_JWT)
25+
monkeypatch.setenv("UIPATH_URL", "https://example.com/org/tenant/orchestrator_/")
26+
monkeypatch.setenv("UIPATH_TENANT_ID", "tenant")
27+
monkeypatch.setenv("UIPATH_ORGANIZATION_ID", "org")
28+
monkeypatch.delenv("UIPATH_AGENTHUB_CONFIG", raising=False)
29+
30+
31+
class _Sentinel:
32+
"""Lightweight stand-in for the chat model returned by the upstream factory."""
33+
34+
35+
def _stub_factory(monkeypatch):
    """Swap the upstream factory for a stub that records its call.

    Returns the dict the stub populates: the positional ``model`` plus
    every keyword argument the factory was invoked with.
    """
    recorded: dict = {}

    def _capture(model, **kwargs):
        recorded["model"] = model
        recorded |= kwargs
        return _Sentinel()

    monkeypatch.setattr(chat_model_factory, "get_chat_model_factory", _capture)
    return recorded
49+
50+
51+
def test_new_path_threads_agenthub_config_into_client_settings(monkeypatch):
    """An explicit agenthub_config must land on the forwarded client_settings."""
    captured = _stub_factory(monkeypatch)

    chat_model_factory.get_chat_model(
        "gpt-4.1-mini-2025-04-14",
        agenthub_config="agentsplayground",
        use_new_llm_clients=True,
    )

    forwarded = captured["client_settings"]
    assert forwarded is not None, "factory must receive a non-None client_settings"
    assert forwarded.agenthub_config == "agentsplayground"
63+
64+
65+
def test_new_path_passes_through_when_agenthub_config_is_none(monkeypatch):
    """Without an explicit agenthub_config, no client_settings is synthesized.

    The upstream factory (and the existing env-var / mixin machinery) must
    stay in charge of defaulting when the caller does not opt in.
    """
    captured = _stub_factory(monkeypatch)

    chat_model_factory.get_chat_model(
        "gpt-4.1-mini-2025-04-14",
        agenthub_config=None,
        use_new_llm_clients=True,
    )

    assert captured["client_settings"] is None
78+
79+
80+
def test_new_path_mutates_caller_supplied_client_settings(monkeypatch):
    """A caller-provided settings object is mutated in place, never replaced."""
    captured = _stub_factory(monkeypatch)

    supplied = SimpleNamespace(
        agenthub_config="agentsruntime",
        other_field="preserved",
    )

    chat_model_factory.get_chat_model(
        "gpt-4.1-mini-2025-04-14",
        client_settings=supplied,  # type: ignore[arg-type]
        agenthub_config="agentsplayground",
        use_new_llm_clients=True,
    )

    forwarded = captured["client_settings"]
    # Identity, not equality: unrelated fields on the instance must survive.
    assert forwarded is supplied, (
        "caller's settings instance must be preserved, not replaced"
    )
    assert forwarded.agenthub_config == "agentsplayground"
    assert forwarded.other_field == "preserved"
101+
102+
103+
def test_legacy_path_forwards_agenthub_config(monkeypatch):
    """The legacy routing still receives agenthub_config unchanged."""
    import uipath_langchain.chat._legacy.chat_model_factory as legacy_module

    seen: dict = {}

    def _fake_legacy(
        model, temperature, max_tokens, agenthub_config, byo_connection_id, **kwargs
    ):
        seen["model"] = model
        seen["agenthub_config"] = agenthub_config
        return _Sentinel()

    monkeypatch.setattr(legacy_module, "get_chat_model", _fake_legacy)

    chat_model_factory.get_chat_model(
        "gpt-4.1-mini-2025-04-14",
        agenthub_config="agentsplayground",
        use_new_llm_clients=False,
    )

    assert seen["agenthub_config"] == "agentsplayground"

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)