2 changes: 1 addition & 1 deletion pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "uipath-langchain"
-version = "0.10.18"
+version = "0.10.19"
 description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
 readme = { file = "README.md", content-type = "text/markdown" }
 requires-python = ">=3.11"
14 changes: 12 additions & 2 deletions src/uipath_langchain/chat/chat_model_factory.py
@@ -73,8 +73,10 @@ def get_chat_model(
             returned chat model. Accepts ``list[BaseCallbackHandler]`` or a
             ``BaseCallbackManager``. Forwarded only when explicitly set.
             Ignored by the legacy factory.
-        agenthub_config: AgentHub config header value. Required by the legacy
-            factory; ignored by the new factory.
+        agenthub_config: AgentHub config header value. Threaded onto
+            ``client_settings.agenthub_config`` on the new path (PlatformSettings
+            otherwise defaults to ``"agentsruntime"``); required by the legacy
+            factory.
         use_new_llm_clients: Routes to the new ``uipath_langchain_client``
             factory when True (default). When False, routes to the legacy
             in-repo clients.
@@ -95,6 +97,14 @@ def get_chat_model(
             **kwargs,
         )
 
+    if agenthub_config is not None:
+        if client_settings is None:
+            from uipath_langchain_client.settings import get_default_client_settings
+
+            client_settings = get_default_client_settings()
+        if hasattr(client_settings, "agenthub_config"):
+            client_settings.agenthub_config = agenthub_config
+
     optional_kwargs = {
         k: v
         for k, v in {
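For context, a minimal usage sketch of the new path (not part of the diff): the model name and AgentHub config value below are illustrative, and the UiPath platform environment variables (UIPATH_URL, UIPATH_ACCESS_TOKEN, etc.) are assumed to be set, as in the test fixture that follows.

    from uipath_langchain.chat import chat_model_factory

    # With use_new_llm_clients=True, an explicit agenthub_config is copied onto
    # client_settings before delegating to the uipath_langchain_client factory,
    # overriding the "agentsruntime" default on PlatformSettings.
    llm = chat_model_factory.get_chat_model(
        "gpt-4.1-mini-2025-04-14",
        agenthub_config="agentsplayground",
        use_new_llm_clients=True,
    )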
123 changes: 123 additions & 0 deletions tests/chat/test_chat_model_factory_agenthub.py
@@ -0,0 +1,123 @@
"""Tests for chat_model_factory.get_chat_model agenthub_config plumbing.

The new-LLM-clients path must thread agenthub_config onto client_settings
before delegating to the upstream factory. PlatformSettings.agenthub_config
defaults to "agentsruntime", so without explicit threading, the carefully
computed value at the call site never reaches the wire.
"""

from types import SimpleNamespace

import pytest

from uipath_langchain.chat import chat_model_factory

_FAKE_JWT = (
"eyJhbGciOiAiSFMyNTYiLCAidHlwIjogIkpXVCJ9."
"eyJzdWIiOiAidGVzdCIsICJpc3MiOiAidGVzdCJ9."
"signature"
)


@pytest.fixture(autouse=True)
def _platform_env(monkeypatch):
monkeypatch.setenv("UIPATH_ACCESS_TOKEN", _FAKE_JWT)
monkeypatch.setenv("UIPATH_URL", "https://example.com/org/tenant/orchestrator_/")
monkeypatch.setenv("UIPATH_TENANT_ID", "tenant")
monkeypatch.setenv("UIPATH_ORGANIZATION_ID", "org")
monkeypatch.delenv("UIPATH_AGENTHUB_CONFIG", raising=False)


class _Sentinel:
"""Lightweight stand-in for the chat model returned by the upstream factory."""


def _stub_factory(monkeypatch):
"""Replace the upstream factory with a capturing stub.

Returns a dict that will receive the kwargs the factory was called with.
"""
captured: dict = {}

def fake_factory(model, **kwargs):
captured["model"] = model
captured.update(kwargs)
return _Sentinel()

monkeypatch.setattr(chat_model_factory, "get_chat_model_factory", fake_factory)
return captured


def test_new_path_threads_agenthub_config_into_client_settings(monkeypatch):
captured = _stub_factory(monkeypatch)

chat_model_factory.get_chat_model(
"gpt-4.1-mini-2025-04-14",
agenthub_config="agentsplayground",
use_new_llm_clients=True,
)

settings = captured["client_settings"]
assert settings is not None, "factory must receive a non-None client_settings"
assert settings.agenthub_config == "agentsplayground"


def test_new_path_passes_through_when_agenthub_config_is_none(monkeypatch):
captured = _stub_factory(monkeypatch)

chat_model_factory.get_chat_model(
"gpt-4.1-mini-2025-04-14",
agenthub_config=None,
use_new_llm_clients=True,
)

# Without explicit agenthub_config, the factory must not synthesize a
# client_settings — leave it for the upstream factory to default and let
# the existing env-var / mixin paths govern.
assert captured["client_settings"] is None


def test_new_path_mutates_caller_supplied_client_settings(monkeypatch):
captured = _stub_factory(monkeypatch)

caller_settings = SimpleNamespace(
agenthub_config="agentsruntime",
other_field="preserved",
)

chat_model_factory.get_chat_model(
"gpt-4.1-mini-2025-04-14",
client_settings=caller_settings, # type: ignore[arg-type]
agenthub_config="agentsplayground",
use_new_llm_clients=True,
)

forwarded = captured["client_settings"]
assert forwarded is caller_settings, (
"caller's settings instance must be preserved, not replaced"
)
assert forwarded.agenthub_config == "agentsplayground"
assert forwarded.other_field == "preserved"


def test_legacy_path_forwards_agenthub_config(monkeypatch):
import uipath_langchain.chat._legacy.chat_model_factory as legacy_module

captured: dict = {}

def fake_legacy_factory(
model, temperature, max_tokens, agenthub_config, byo_connection_id, **kwargs
):
captured["model"] = model
captured["agenthub_config"] = agenthub_config
return _Sentinel()

monkeypatch.setattr(legacy_module, "get_chat_model", fake_legacy_factory)

chat_model_factory.get_chat_model(
"gpt-4.1-mini-2025-04-14",
agenthub_config="agentsplayground",
use_new_llm_clients=False,
)

assert captured["agenthub_config"] == "agentsplayground"
2 changes: 1 addition & 1 deletion uv.lock
