Skip to content

Commit 25eac41

Browse files
Merge branch 'main' into fix_llm_guardrails_check
2 parents efba4e2 + 0dce3d2 commit 25eac41

File tree

6 files changed

+218
-17
lines changed

6 files changed

+218
-17
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,7 @@ description = "Python SDK that enables developers to build and deploy LangGraph
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"
77
dependencies = [
8-
"uipath>=2.2.35, <2.3.0",
8+
"uipath>=2.2.41, <2.3.0",
99
"langgraph>=1.0.0, <2.0.0",
1010
"langchain-core>=1.0.0, <2.0.0",
1111
"aiosqlite==0.21.0",

src/uipath_langchain/agent/guardrails/guardrails_factory.py

Lines changed: 12 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@
1717
AgentUnknownGuardrail,
1818
AgentWordOperator,
1919
AgentWordRule,
20+
StandardRecipient,
2021
)
2122
from uipath.core.guardrails import (
2223
BooleanRule,
@@ -273,17 +274,18 @@ def build_guardrails_with_actions(
273274
)
274275
)
275276
elif isinstance(action, AgentGuardrailEscalateAction):
276-
result.append(
277-
(
278-
converted_guardrail,
279-
EscalateAction(
280-
app_name=action.app.name,
281-
app_folder_path=action.app.folder_name,
282-
version=action.app.version,
283-
assignee=action.recipient.value,
284-
),
277+
if isinstance(action.recipient, StandardRecipient):
278+
result.append(
279+
(
280+
converted_guardrail,
281+
EscalateAction(
282+
app_name=action.app.name,
283+
app_folder_path=action.app.folder_name,
284+
version=action.app.version,
285+
assignee=action.recipient.value,
286+
),
287+
)
285288
)
286-
)
287289
elif isinstance(action, AgentGuardrailFilterAction):
288290
result.append((converted_guardrail, FilterAction(fields=action.fields)))
289291
return result
Lines changed: 123 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,123 @@
1+
import base64
2+
from enum import StrEnum
3+
from typing import Any
4+
5+
import httpx
6+
from uipath._utils._ssl_context import get_httpx_client_kwargs
7+
8+
# MIME types accepted as inline image attachments (PNG, JPEG, GIF, WebP).
IMAGE_MIME_TYPES: set[str] = {
    "image/png",
    "image/jpeg",
    "image/gif",
    "image/webp",
}
14+
15+
16+
class LlmProvider(StrEnum):
17+
OPENAI = "openai"
18+
BEDROCK = "bedrock"
19+
VERTEX = "vertex"
20+
UNKNOWN = "unknown"
21+
22+
23+
def is_pdf(mime_type: str) -> bool:
    """Return True when *mime_type* denotes a PDF document (case-insensitive)."""
    normalized = mime_type.lower()
    return normalized == "application/pdf"
26+
27+
28+
def is_image(mime_type: str) -> bool:
    """Return True when *mime_type* is a supported image format (PNG, JPEG, GIF, WebP)."""
    normalized = mime_type.lower()
    return normalized in IMAGE_MIME_TYPES
31+
32+
33+
def detect_provider(model_name: str) -> LlmProvider:
    """Map *model_name* to its hosting provider (Bedrock, OpenAI, or Vertex).

    Matching is a case-insensitive substring check; Anthropic/Claude names
    take precedence, then GPT, then Gemini.

    Raises:
        ValueError: if the name is empty or matches no known provider.
    """
    if not model_name:
        raise ValueError(f"Unsupported model: {model_name}")

    lowered = model_name.lower()

    if any(marker in lowered for marker in ("anthropic", "claude")):
        return LlmProvider.BEDROCK
    if "gpt" in lowered:
        return LlmProvider.OPENAI
    if "gemini" in lowered:
        return LlmProvider.VERTEX

    raise ValueError(f"Unsupported model: {model_name}")
50+
51+
52+
async def _download_file(url: str) -> str:
    """Fetch *url* and return the response body base64-encoded as UTF-8 text.

    Raises:
        httpx.HTTPStatusError: for non-2xx responses (via raise_for_status).
    """
    async with httpx.AsyncClient(**get_httpx_client_kwargs()) as client:
        response = await client.get(url)
        response.raise_for_status()
        payload = response.content

    return base64.b64encode(payload).decode("utf-8")
60+
61+
62+
async def build_message_content_part_from_data(
63+
url: str,
64+
filename: str,
65+
mime_type: str,
66+
model: str,
67+
) -> dict[str, Any]:
68+
"""Download a file and build a provider-specific message content part.
69+
70+
The format varies based on the detected provider (Bedrock, OpenAI, or Vertex).
71+
"""
72+
provider = detect_provider(model)
73+
74+
if provider == LlmProvider.BEDROCK:
75+
raise ValueError("Anthropic models are not yet supported for file attachments")
76+
77+
if provider == LlmProvider.OPENAI:
78+
return await _build_openai_content_part_from_data(
79+
url, mime_type, filename, False
80+
)
81+
82+
if provider == LlmProvider.VERTEX:
83+
raise ValueError("Gemini models are not yet supported for file attachments")
84+
85+
raise ValueError(f"Unsupported provider: {provider}")
86+
87+
88+
async def _build_openai_content_part_from_data(
    url: str,
    mime_type: str,
    filename: str,
    download_image: bool,
) -> dict[str, Any]:
    """Build an OpenAI message content part for a file.

    When *download_image* is true the payload is fetched and embedded
    base64-encoded; otherwise the part carries a URL reference only.

    Raises:
        ValueError: when *mime_type* is neither a supported image nor a PDF.
    """
    image = is_image(mime_type)
    pdf = is_pdf(mime_type)

    if download_image:
        # Inline the payload: fetch the file and embed it base64-encoded.
        encoded = await _download_file(url)
        if image:
            return {
                "type": "input_image",
                "image_url": f"data:{mime_type};base64,{encoded}",
            }
        if pdf:
            return {
                "type": "input_file",
                "filename": filename,
                "file_data": encoded,
            }
    elif image:
        return {
            "type": "input_image",
            "image_url": url,
        }
    elif pdf:
        return {
            "type": "input_file",
            "file_url": url,
        }

    raise ValueError(f"Unsupported mime_type: {mime_type}")
Lines changed: 76 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,76 @@
1+
"""LLM invocation with file attachments support."""
2+
3+
from dataclasses import dataclass
4+
from typing import Any
5+
6+
from langchain_core.language_models import BaseChatModel
7+
from langchain_core.messages import AIMessage, AnyMessage, HumanMessage
8+
9+
from .file_type_handler import build_message_content_part_from_data
10+
11+
12+
@dataclass
class FileInfo:
    """Metadata describing a file to attach to an LLM call."""

    url: str        # download location of the file
    name: str       # filename reported to the model
    mime_type: str  # e.g. "application/pdf" or "image/png"
19+
20+
21+
def _get_model_name(model: BaseChatModel) -> str:
22+
"""Extract model name from a BaseChatModel instance."""
23+
for attr in ["model_name", "_model_name", "model", "model_id"]:
24+
value = getattr(model, attr, None)
25+
if value and isinstance(value, str):
26+
return value
27+
raise ValueError(f"Model name not found in model {model}")
28+
29+
30+
async def create_part_for_file(
    file_info: FileInfo,
    model: "BaseChatModel",
) -> dict[str, Any]:
    """Create a provider-specific message content part for *file_info*.

    The file is downloaded from ``file_info.url`` and formatted for the
    provider inferred from the model's name.
    """
    name = _get_model_name(model)
    return await build_message_content_part_from_data(
        url=file_info.url,
        filename=file_info.name,
        mime_type=file_info.mime_type,
        model=name,
    )
45+
46+
47+
def _require_ai_message(response: Any) -> AIMessage:
    """Return *response* if it is an AIMessage, otherwise raise TypeError.

    Centralizes the response validation used by llm_call_with_files so both
    invocation paths fail with an identical message (the original duplicated
    this check verbatim in two places).
    """
    if not isinstance(response, AIMessage):
        raise TypeError(f"LLM returned {type(response).__name__} instead of AIMessage")
    return response


async def llm_call_with_files(
    messages: list[AnyMessage],
    files: list[FileInfo],
    model: BaseChatModel,
) -> AIMessage:
    """Invoke an LLM with file attachments.

    Downloads each file, builds provider-specific content parts, and appends
    them to the conversation as a single HumanMessage. With no files this is
    equivalent to ``model.ainvoke(messages)``.

    Raises:
        TypeError: if the model returns something other than an AIMessage.
    """
    if not files:
        return _require_ai_message(await model.ainvoke(messages))

    # One content part per attachment; parts are built sequentially because
    # each may hit the network via create_part_for_file.
    content_parts: list[str | dict[Any, Any]] = [
        await create_part_for_file(file_info, model) for file_info in files
    ]

    all_messages = [*messages, HumanMessage(content=content_parts)]
    return _require_ai_message(await model.ainvoke(all_messages))

tests/agent/guardrails/test_guardrails_factory.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,6 @@
99
AgentBooleanOperator,
1010
AgentBooleanRule,
1111
AgentCustomGuardrail,
12-
AgentEscalationRecipient,
1312
AgentEscalationRecipientType,
1413
AgentGuardrailActionType,
1514
AgentGuardrailBlockAction,
@@ -24,6 +23,7 @@
2423
AgentWordOperator,
2524
AgentWordRule,
2625
FieldReference,
26+
StandardRecipient,
2727
)
2828
from uipath.agent.models.agent import (
2929
AgentGuardrail as AgentGuardrailModel,
@@ -139,7 +139,7 @@ def test_escalate_action_is_mapped_with_app_and_recipient(self) -> None:
139139
folder_name="/TestFolder",
140140
version=2,
141141
)
142-
recipient = AgentEscalationRecipient(
142+
recipient = StandardRecipient(
143143
type=AgentEscalationRecipientType.USER_EMAIL,
144144
value="admin@example.com",
145145
)

uv.lock

Lines changed: 4 additions & 4 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)