Skip to content

Commit 1d16ea4

Browse files
cristipufu and claude committed
fix: pydantic-ai stream LLM tokens as UiPathConversationMessageEvent
Replace raw dict payloads with proper UiPathConversationMessageEvent objects following the START -> CHUNK(s) -> END lifecycle. Use node.stream() + stream_text(delta=True) for real-time token streaming to the Chat UI. - Stream individual LLM tokens via pydantic-ai's ModelRequestNode.stream() - Emit proper message lifecycle events (start, content chunks, end) - Add strong typing for node parameters (ModelRequestNode, CallToolsNode) - Simplify message schema (remove unused toolCalls/interrupts/citations) - Add 6 e2e streaming tests with mocked LLM - Bump version to 0.0.3 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent 6c7914b commit 1d16ea4

7 files changed

Lines changed: 295 additions & 40 deletions

File tree

packages/uipath-pydantic-ai/pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-pydantic-ai"
3-
version = "0.0.2"
3+
version = "0.0.3"
44
description = "Python SDK that enables developers to build and deploy PydanticAI agents to the UiPath Cloud Platform"
55
readme = "README.md"
66
requires-python = ">=3.11"
Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,9 @@
1+
flowchart TB
2+
__start__(__start__)
3+
weather_agent(weather_agent)
4+
weather_agent_tools(tools)
5+
__end__(__end__)
6+
weather_agent --> weather_agent_tools
7+
weather_agent_tools --> weather_agent
8+
__start__ --> |input|weather_agent
9+
weather_agent --> |output|__end__

packages/uipath-pydantic-ai/src/uipath_pydantic_ai/runtime/runtime.py

Lines changed: 88 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,25 @@
11
"""Runtime class for executing PydanticAI Agents within the UiPath framework."""
22

33
import json
4+
from datetime import datetime, timezone
45
from typing import Any, AsyncGenerator
56
from uuid import uuid4
67

78
from pydantic import BaseModel
89
from pydantic_ai import Agent, FunctionToolset
10+
from pydantic_ai._agent_graph import CallToolsNode, ModelRequestNode
11+
from pydantic_ai.messages import ModelResponse, ToolReturnPart
12+
from uipath.core.chat.content import (
13+
UiPathConversationContentPartChunkEvent,
14+
UiPathConversationContentPartEndEvent,
15+
UiPathConversationContentPartEvent,
16+
UiPathConversationContentPartStartEvent,
17+
)
18+
from uipath.core.chat.message import (
19+
UiPathConversationMessageEndEvent,
20+
UiPathConversationMessageEvent,
21+
UiPathConversationMessageStartEvent,
22+
)
923
from uipath.core.serialization import serialize_json
1024
from uipath.runtime import (
1125
UiPathExecuteOptions,
@@ -88,18 +102,65 @@ async def stream(
88102
)
89103

90104
model_node = node
91-
node = await agent_run.next(node)
92-
93-
yield UiPathRuntimeMessageEvent(
94-
payload=json.loads(serialize_json(model_node.request)),
95-
metadata={"event_name": "model_request"},
96-
)
105+
message_id = str(uuid4())
106+
content_part_id = f"chunk-{message_id}-0"
107+
has_text = False
108+
109+
async with model_node.stream(agent_run.ctx) as stream:
110+
async for text_chunk in stream.stream_text(
111+
delta=True, debounce_by=None
112+
):
113+
if not has_text:
114+
has_text = True
115+
yield UiPathRuntimeMessageEvent(
116+
payload=UiPathConversationMessageEvent(
117+
message_id=message_id,
118+
start=UiPathConversationMessageStartEvent(
119+
role="assistant",
120+
timestamp=self._get_timestamp(),
121+
),
122+
content_part=UiPathConversationContentPartEvent(
123+
content_part_id=content_part_id,
124+
start=UiPathConversationContentPartStartEvent(
125+
mime_type="text/plain",
126+
),
127+
),
128+
),
129+
)
130+
131+
yield UiPathRuntimeMessageEvent(
132+
payload=UiPathConversationMessageEvent(
133+
message_id=message_id,
134+
content_part=UiPathConversationContentPartEvent(
135+
content_part_id=content_part_id,
136+
chunk=UiPathConversationContentPartChunkEvent(
137+
data=text_chunk,
138+
),
139+
),
140+
),
141+
)
142+
143+
next_node = await agent_run.next(model_node)
144+
145+
if has_text:
146+
yield UiPathRuntimeMessageEvent(
147+
payload=UiPathConversationMessageEvent(
148+
message_id=message_id,
149+
end=UiPathConversationMessageEndEvent(),
150+
content_part=UiPathConversationContentPartEvent(
151+
content_part_id=content_part_id,
152+
end=UiPathConversationContentPartEndEvent(),
153+
),
154+
),
155+
)
97156

157+
assert isinstance(next_node, CallToolsNode)
98158
yield UiPathRuntimeStateEvent(
99-
payload=self._model_response_payload(node),
159+
payload=self._model_response_payload(next_node),
100160
node_name=agent_name,
101161
phase=UiPathRuntimeStatePhase.COMPLETED,
102162
)
163+
node = next_node
103164

104165
elif Agent.is_call_tools_node(node):
105166
tool_calls = node.model_response.tool_calls if has_tools else []
@@ -115,14 +176,15 @@ async def stream(
115176
phase=UiPathRuntimeStatePhase.STARTED,
116177
)
117178

118-
node = await agent_run.next(node)
179+
next_node = await agent_run.next(node)
119180

120-
if tool_calls:
181+
if tool_calls and isinstance(next_node, ModelRequestNode):
121182
yield UiPathRuntimeStateEvent(
122-
payload=self._tool_results_payload(node),
183+
payload=self._tool_results_payload(next_node),
123184
node_name=tools_node_name,
124185
phase=UiPathRuntimeStatePhase.COMPLETED,
125186
)
187+
node = next_node
126188

127189
else:
128190
node = await agent_run.next(node)
@@ -136,7 +198,15 @@ async def stream(
136198
raise self._create_runtime_error(e) from e
137199

138200
@staticmethod
139-
def _model_request_payload(node: Any) -> dict[str, Any]:
201+
def _get_timestamp() -> str:
202+
"""Get current UTC timestamp in ISO 8601 format."""
203+
now = datetime.now(timezone.utc)
204+
return now.strftime("%Y-%m-%dT%H:%M:%S.") + f"{now.microsecond // 1000:03d}Z"
205+
206+
@staticmethod
207+
def _model_request_payload(
208+
node: ModelRequestNode[Any, Any],
209+
) -> dict[str, Any]:
140210
"""Build payload for a ModelRequestNode STARTED event."""
141211
payload: dict[str, Any] = {}
142212
try:
@@ -153,15 +223,17 @@ def _model_request_payload(node: Any) -> dict[str, Any]:
153223
return payload
154224

155225
@staticmethod
156-
def _model_response_payload(next_node: Any) -> dict[str, Any]:
226+
def _model_response_payload(
227+
next_node: CallToolsNode[Any, Any],
228+
) -> dict[str, Any]:
157229
"""Build payload for a ModelRequestNode COMPLETED event.
158230
159231
After agent_run.next() the returned node is the CallToolsNode
160232
which carries the model_response with usage data.
161233
"""
162234
payload: dict[str, Any] = {}
163235
try:
164-
response = next_node.model_response
236+
response: ModelResponse = next_node.model_response
165237
if response.model_name:
166238
payload["model_name"] = response.model_name
167239
usage = response.usage
@@ -175,14 +247,14 @@ def _model_response_payload(next_node: Any) -> dict[str, Any]:
175247
return payload
176248

177249
@staticmethod
178-
def _tool_results_payload(next_node: Any) -> dict[str, Any]:
250+
def _tool_results_payload(
251+
next_node: ModelRequestNode[Any, Any],
252+
) -> dict[str, Any]:
179253
"""Build payload for a CallToolsNode COMPLETED event.
180254
181255
After agent_run.next() the returned node is a ModelRequestNode
182256
whose request.parts contain ToolReturnPart objects with results.
183257
"""
184-
from pydantic_ai.messages import ToolReturnPart
185-
186258
payload: dict[str, Any] = {}
187259
try:
188260
parts = next_node.request.parts if next_node.request else []

packages/uipath-pydantic-ai/src/uipath_pydantic_ai/runtime/schema.py

Lines changed: 2 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -286,17 +286,11 @@ def _conversation_message_item_schema() -> dict[str, Any]:
286286
"inline": {},
287287
},
288288
"required": ["inline"],
289-
},
290-
"citations": {
291-
"type": "array",
292-
"items": {"type": "object"},
293-
},
289+
}
294290
},
295291
"required": ["data"],
296292
},
297-
},
298-
"toolCalls": {"type": "array", "items": {"type": "object"}},
299-
"interrupts": {"type": "array", "items": {"type": "object"}},
293+
}
300294
},
301295
"required": ["role", "contentParts"],
302296
}

0 commit comments

Comments (0)