Skip to content

Commit acde588

Browse files
feat(agent): implement llm_messages_limit handling at llm_node (#435)
1 parent ba961da commit acde588

7 files changed

Lines changed: 39 additions & 18 deletions

File tree

pyproject.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,11 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.4.11"
3+
version = "0.4.12"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"
77
dependencies = [
8-
"uipath>=2.5.10,<2.6.0",
8+
"uipath>=2.5.15,<2.6.0",
99
"uipath-runtime>=0.5.1,<0.6.0",
1010
"langgraph>=1.0.0, <2.0.0",
1111
"langchain-core>=1.2.5, <2.0.0",

src/uipath_langchain/agent/exceptions/exceptions.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,4 @@
1-
"""Exceptions for the basic agent loop."""
1+
"""Exceptions for the agent graph."""
22

33
from uipath.runtime.errors import UiPathRuntimeError
44

src/uipath_langchain/agent/react/agent.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,3 @@
1-
import os
21
from typing import Callable, Sequence, Type, TypeVar
32

43
from langchain_core.language_models import BaseChatModel
@@ -66,8 +65,6 @@ def create_agent(
6665
if config is None:
6766
config = AgentGraphConfig()
6867

69-
os.environ["LANGCHAIN_RECURSION_LIMIT"] = str(config.recursion_limit)
70-
7168
agent_tools = list(tools)
7269
flow_control_tools: list[BaseTool] = (
7370
[] if config.is_conversational else create_flow_control_tools(output_schema)
@@ -109,7 +106,11 @@ def create_agent(
109106
builder.add_edge(START, AgentGraphNode.INIT)
110107

111108
llm_node = create_llm_node(
112-
model, llm_tools, config.thinking_messages_limit, config.is_conversational
109+
model,
110+
llm_tools,
111+
config.is_conversational,
112+
config.llm_messages_limit,
113+
config.thinking_messages_limit,
113114
)
114115
llm_with_guardrails_subgraph = create_llm_guardrails_subgraph(
115116
(AgentGraphNode.LLM, llm_node), guardrails, input_schema=input_schema
Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1 +1,2 @@
1-
MAX_CONSECUTIVE_THINKING_MESSAGES = 0
1+
DEFAULT_MAX_CONSECUTIVE_THINKING_MESSAGES = 0
2+
DEFAULT_MAX_LLM_MESSAGES = 25

src/uipath_langchain/agent/react/llm_node.py

Lines changed: 19 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -5,8 +5,13 @@
55
from langchain_core.language_models import BaseChatModel
66
from langchain_core.messages import AIMessage, AnyMessage, ToolCall
77
from langchain_core.tools import BaseTool
8+
from uipath.runtime.errors import UiPathErrorCategory, UiPathErrorCode
89

9-
from .constants import MAX_CONSECUTIVE_THINKING_MESSAGES
10+
from ..exceptions import AgentTerminationException
11+
from .constants import (
12+
DEFAULT_MAX_CONSECUTIVE_THINKING_MESSAGES,
13+
DEFAULT_MAX_LLM_MESSAGES,
14+
)
1015
from .types import FLOW_CONTROL_TOOLS, AgentGraphState
1116
from .utils import count_consecutive_thinking_messages
1217

@@ -46,8 +51,9 @@ def _filter_control_flow_tool_calls(
4651
def create_llm_node(
4752
model: BaseChatModel,
4853
tools: Sequence[BaseTool] | None = None,
49-
thinking_messages_limit: int = MAX_CONSECUTIVE_THINKING_MESSAGES,
5054
is_conversational: bool = False,
55+
llm_messages_limit: int = DEFAULT_MAX_LLM_MESSAGES,
56+
thinking_messages_limit: int = DEFAULT_MAX_CONSECUTIVE_THINKING_MESSAGES,
5157
):
5258
"""Create LLM node with dynamic tool_choice enforcement.
5359
@@ -57,6 +63,8 @@ def create_llm_node(
5763
Args:
5864
model: The chat model to use
5965
tools: Available tools to bind
66+
is_conversational: Whether this is a conversational agent
67+
llm_messages_limit: Maximum number of LLM calls allowed per execution
6068
thinking_messages_limit: Max consecutive LLM responses without tool calls
6169
before enforcing tool usage. 0 = force tools every time.
6270
"""
@@ -67,6 +75,15 @@ def create_llm_node(
6775
async def llm_node(state: AgentGraphState):
6876
messages: list[AnyMessage] = state.messages
6977

78+
agent_ai_messages = sum(1 for msg in messages if isinstance(msg, AIMessage))
79+
if agent_ai_messages >= llm_messages_limit:
80+
raise AgentTerminationException(
81+
code=UiPathErrorCode.EXECUTION_ERROR,
82+
title=f"Maximum iterations of '{llm_messages_limit}' reached.",
83+
detail="Verify the agent's trajectory or consider increasing the max iterations in the agent's settings.",
84+
category=UiPathErrorCategory.USER,
85+
)
86+
7087
consecutive_thinking_messages = count_consecutive_thinking_messages(messages)
7188

7289
if (

src/uipath_langchain/agent/react/types.py

Lines changed: 5 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -64,13 +64,15 @@ class AgentGraphNode(StrEnum):
6464

6565

6666
class AgentGraphConfig(BaseModel):
67-
recursion_limit: int = Field(
68-
default=50, ge=1, description="Maximum recursion limit for the agent graph"
67+
llm_messages_limit: int = Field(
68+
default=25,
69+
ge=1,
70+
description="Maximum number of LLM calls allowed per agent execution",
6971
)
7072
thinking_messages_limit: int = Field(
7173
default=0,
7274
ge=0,
73-
description="Max consecutive thinking messages before enforcing tool usage. 0 = force tools every time.",
75+
description="Max consecutive thinking messages before enforcing tool calling. 0 = force tool calling every time.",
7476
)
7577
is_conversational: bool = Field(
7678
default=False, description="If set, creates a graph for conversational agents"

uv.lock

Lines changed: 5 additions & 5 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments (0)