Skip to content

Commit e1a8f1d

Browse files
feat: add file support for more LLM API Flavors (#425)
1 parent 1fb7438 commit e1a8f1d

34 files changed

Lines changed: 727 additions & 327 deletions

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.4.17"
3+
version = "0.4.18"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

src/uipath_langchain/agent/react/agent.py

Lines changed: 17 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,8 @@
88
from pydantic import BaseModel
99
from uipath.platform.guardrails import BaseGuardrail
1010

11+
from uipath_langchain.chat.types import UiPathPassthroughChatModel
12+
1113
from ..guardrails.actions import GuardrailAction
1214
from .guardrails.guardrails_subgraph import (
1315
create_agent_init_guardrails_subgraph,
@@ -33,6 +35,7 @@
3335
AgentGraphConfig,
3436
AgentGraphNode,
3537
AgentGraphState,
38+
AgentSettings,
3639
)
3740
from .utils import create_state_with_input
3841

@@ -62,6 +65,17 @@ def create_agent(
6265
"""
6366
from ..tools import create_tool_node
6467

68+
if not isinstance(model, UiPathPassthroughChatModel):
69+
raise TypeError(
70+
f"Model {type(model).__name__} does not implement UiPathPassthroughChatModel. "
71+
"The model must have llm_provider and api_flavor properties."
72+
)
73+
74+
agent_settings = AgentSettings(
75+
llm_provider=model.llm_provider,
76+
api_flavor=model.api_flavor,
77+
)
78+
6579
if config is None:
6680
config = AgentGraphConfig()
6781

@@ -71,7 +85,9 @@ def create_agent(
7185
)
7286
llm_tools: list[BaseTool] = [*agent_tools, *flow_control_tools]
7387

74-
init_node = create_init_node(messages, input_schema, config.is_conversational)
88+
init_node = create_init_node(
89+
messages, input_schema, config.is_conversational, agent_settings
90+
)
7591

7692
tool_nodes = create_tool_node(agent_tools)
7793
tool_nodes_with_guardrails = create_tools_guardrails_subgraph(

src/uipath_langchain/agent/react/file_type_handler.py

Lines changed: 0 additions & 210 deletions
This file was deleted.

src/uipath_langchain/agent/react/init_node.py

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,15 @@
99
from .job_attachments import (
1010
get_job_attachments,
1111
)
12+
from .types import AgentSettings
1213

1314

1415
def create_init_node(
1516
messages: Sequence[SystemMessage | HumanMessage]
1617
| Callable[[Any], Sequence[SystemMessage | HumanMessage]],
1718
input_schema: type[BaseModel] | None,
1819
is_conversational: bool = False,
20+
agent_settings: AgentSettings | None = None,
1921
):
2022
def graph_state_init(state: Any) -> Any:
2123
resolved_messages: Sequence[SystemMessage | HumanMessage] | Overwrite
@@ -46,6 +48,7 @@ def graph_state_init(state: Any) -> Any:
4648
"messages": resolved_messages,
4749
"inner_state": {
4850
"job_attachments": job_attachments_dict,
51+
"agent_settings": agent_settings,
4952
},
5053
}
5154

src/uipath_langchain/agent/react/llm_node.py

Lines changed: 4 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
"""LLM node for ReAct Agent graph."""
22

3-
from typing import Literal, Sequence, TypeVar
3+
from typing import Sequence, TypeVar
44

55
from langchain_core.language_models import BaseChatModel
66
from langchain_core.messages import AIMessage, AnyMessage, ToolCall
@@ -14,6 +14,7 @@
1414
from uipath_langchain.agent.tools.structured_tool_with_argument_properties import (
1515
StructuredToolWithArgumentProperties,
1616
)
17+
from uipath_langchain.llm import get_payload_handler
1718

1819
from ..exceptions import AgentTerminationException
1920
from .constants import (
@@ -23,28 +24,6 @@
2324
from .types import FLOW_CONTROL_TOOLS, AgentGraphState
2425
from .utils import count_consecutive_thinking_messages, extract_input_data_from_state
2526

26-
OPENAI_COMPATIBLE_CHAT_MODELS = (
27-
"UiPathChatOpenAI",
28-
"AzureChatOpenAI",
29-
"ChatOpenAI",
30-
"UiPathChat",
31-
"UiPathAzureChatOpenAI",
32-
)
33-
34-
35-
def _get_required_tool_choice_by_model(
36-
model: BaseChatModel,
37-
) -> Literal["required", "any"]:
38-
"""Get the appropriate tool_choice value to enforce tool usage based on model type.
39-
40-
"required" - OpenAI compatible required tool_choice value
41-
"any" - Vertex and Bedrock parameter for required tool_choice value
42-
"""
43-
model_class_name = model.__class__.__name__
44-
if model_class_name in OPENAI_COMPATIBLE_CHAT_MODELS:
45-
return "required"
46-
return "any"
47-
4827

4928
def _filter_control_flow_tool_calls(
5029
tool_calls: list[ToolCall],
@@ -82,7 +61,8 @@ def create_llm_node(
8261
before enforcing tool usage. 0 = force tools every time.
8362
"""
8463
bindable_tools = list(tools) if tools else []
85-
tool_choice_required_value = _get_required_tool_choice_by_model(model)
64+
payload_handler = get_payload_handler(model)
65+
tool_choice_required_value = payload_handler.get_required_tool_choice()
8666

8767
async def llm_node(state: StateT):
8868
messages: list[AnyMessage] = state.messages

0 commit comments

Comments (0)