Skip to content

Commit 3d0afd8

Browse files
fix: check can consume licensing error [JAR-9330] (#725)
1 parent b2f211c commit 3d0afd8

File tree

5 files changed

+98
-3
lines changed

5 files changed

+98
-3
lines changed

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
11
[project]
22
name = "uipath-langchain"
3-
version = "0.9.10"
3+
version = "0.9.11"
44
description = "Python SDK that enables developers to build and deploy LangGraph agents to the UiPath Cloud Platform"
55
readme = { file = "README.md", content-type = "text/markdown" }
66
requires-python = ">=3.11"

src/uipath_langchain/agent/exceptions/exceptions.py

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ class AgentRuntimeErrorCode(str, Enum):
2929

3030
UNEXPECTED_ERROR = "UNEXPECTED_ERROR"
3131
HTTP_ERROR = "HTTP_ERROR"
32+
LICENSE_NOT_AVAILABLE = "LICENSE_NOT_AVAILABLE"
3233

3334
# Routing
3435
ROUTING_ERROR = "ROUTING_ERROR"
Lines changed: 86 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,86 @@
1+
"""Convert LLM provider HTTP errors into structured AgentRuntimeErrors.
2+
3+
Each LLM provider wraps HTTP errors in a different exception type:
4+
- OpenAI: openai.PermissionDeniedError → e.status_code
5+
- Vertex: google.genai.errors.ClientError → e.code
6+
- Bedrock: botocore.exceptions.ClientError → e.response dict
7+
8+
This module extracts the HTTP status code from any of these and re-raises
9+
as an AgentRuntimeError so that upstream error handling (exception mapper,
10+
CAS bridge) can categorise by status code without provider-specific logic.
11+
"""
12+
13+
from uipath.runtime.errors import UiPathErrorCategory
14+
15+
from uipath_langchain.agent.exceptions.exceptions import (
16+
AgentRuntimeError,
17+
AgentRuntimeErrorCode,
18+
)
19+
20+
21+
def _extract_status_code(e: BaseException) -> int | None:
22+
"""Extract HTTP status code from any provider-specific exception.
23+
24+
Supports OpenAI (status_code), Vertex/google.genai (code), and
25+
Bedrock/botocore (response dict). Walks __cause__ chain to handle
26+
LangChain wrapper exceptions (e.g. ChatGoogleGenerativeAIError).
27+
"""
28+
# OpenAI: e.status_code
29+
sc = getattr(e, "status_code", None)
30+
if isinstance(sc, int):
31+
return sc
32+
33+
# Vertex (google.genai.errors.APIError): e.code
34+
sc = getattr(e, "code", None)
35+
if isinstance(sc, int):
36+
return sc
37+
38+
# Bedrock (botocore.exceptions.ClientError): e.response dict
39+
resp = getattr(e, "response", None)
40+
if isinstance(resp, dict):
41+
sc = resp.get("ResponseMetadata", {}).get("HTTPStatusCode")
42+
if isinstance(sc, int):
43+
return sc
44+
45+
# Walk __cause__ chain
46+
cause = getattr(e, "__cause__", None)
47+
if cause is not None and cause is not e:
48+
return _extract_status_code(cause)
49+
50+
return None
51+
52+
53+
# Maps known LLM Gateway status codes to specific error codes.
# Unknown status codes fall back to HTTP_ERROR.
_LLM_STATUS_CODE_MAP: dict[int, AgentRuntimeErrorCode] = {
    403: AgentRuntimeErrorCode.LICENSE_NOT_AVAILABLE,
}


def raise_for_provider_http_error(e: BaseException) -> None:
    """Re-raise provider-specific HTTP errors as a structured AgentRuntimeError.

    Extracts the HTTP status code from any LLM provider exception and
    converts it to an AgentRuntimeError with the status code preserved.
    Known status codes (e.g. 403) get a specific error code so upstream
    handlers can match on the suffix. Does nothing if no HTTP status code
    can be extracted.

    Args:
        e: The provider exception to inspect.

    Raises:
        AgentRuntimeError: When an HTTP status code is found on *e* or in
            its ``__cause__`` chain; chained ``from e``.
    """
    status = _extract_status_code(e)
    if status is None:
        # Not an HTTP-shaped provider error — let the caller handle it.
        return

    error_code = _LLM_STATUS_CODE_MAP.get(status, AgentRuntimeErrorCode.HTTP_ERROR)

    # 403 from the LLM Gateway means the tenant lacks a consumable license,
    # which is a deployment/configuration problem rather than a runtime one.
    category = (
        UiPathErrorCategory.DEPLOYMENT
        if status == 403
        else UiPathErrorCategory.UNKNOWN
    )

    raise AgentRuntimeError(
        code=error_code,
        title=f"LLM provider returned HTTP {status}",
        detail=str(e),
        category=category,
        status=status,
    ) from e

src/uipath_langchain/agent/react/llm_node.py

Lines changed: 9 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@
1616
from uipath_langchain.chat.handlers import get_payload_handler
1717

1818
from ..exceptions import AgentRuntimeError, AgentRuntimeErrorCode
19+
from ..exceptions.licensing import raise_for_provider_http_error
1920
from ..messages.message_utils import replace_tool_calls
2021
from ..tools.static_args import StaticArgsHandler
2122
from .constants import (
@@ -112,7 +113,14 @@ async def llm_node(state: StateT):
112113

113114
llm = model.bind_tools(static_schema_tools, **binding_kwargs)
114115

115-
response = await llm.ainvoke(messages)
116+
try:
117+
response = await llm.ainvoke(messages)
118+
except Exception as e:
119+
# LLM errors arrive as provider-specific exceptions (OpenAI, Bedrock,
120+
# Vertex). Convert to a structured AgentRuntimeError with the HTTP
121+
# status code so upstream handlers can categorise (e.g. 403 → licensing).
122+
raise_for_provider_http_error(e)
123+
raise
116124
if not isinstance(response, AIMessage):
117125
raise AgentRuntimeError(
118126
code=AgentRuntimeErrorCode.LLM_INVALID_RESPONSE,

uv.lock

Lines changed: 1 addition & 1 deletion
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

0 commit comments

Comments
 (0)