Skip to content

Commit 62c32cb

Browse files
Merge branch 'master' into webb/litellm/close-spans
2 parents ecd3718 + 59e97e7 commit 62c32cb

File tree

3 files changed

+339
-109
lines changed

3 files changed

+339
-109
lines changed

sentry_sdk/integrations/pydantic_ai/__init__.py

Lines changed: 127 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,11 @@
1-
from sentry_sdk.integrations import DidNotEnable, Integration
1+
import functools
22

3+
from sentry_sdk.integrations import DidNotEnable, Integration
4+
from sentry_sdk.utils import capture_internal_exceptions
35

46
try:
57
import pydantic_ai # type: ignore # noqa: F401
8+
from pydantic_ai import Agent
69
except ImportError:
710
raise DidNotEnable("pydantic-ai not installed")
811

@@ -14,10 +17,119 @@
1417
_patch_tool_execution,
1518
)
1619

20+
from .spans.ai_client import ai_client_span, update_ai_client_span
21+
22+
from typing import TYPE_CHECKING
23+
24+
if TYPE_CHECKING:
25+
from typing import Any
26+
from pydantic_ai import ModelRequestContext, RunContext
27+
from pydantic_ai.messages import ModelResponse # type: ignore
28+
from pydantic_ai.capabilities import Hooks # type: ignore
29+
30+
31+
def register_hooks(hooks: "Hooks") -> None:
    """
    Create hooks for chat model calls and register them by patching
    ``Agent.__init__`` so the hooks are appended to the ``capabilities``
    argument of every new ``Agent``.

    The span for a model call is opened in ``on_request``, stashed in the
    ``RunContext`` metadata dict (the only state shared between hooks for a
    single call), and closed in ``on_response`` (success) or ``on_error``
    (failure).
    """

    @hooks.on.before_model_request  # type: ignore
    async def on_request(
        ctx: "RunContext[None]", request_context: "ModelRequestContext"
    ) -> "ModelRequestContext":
        # The metadata dict is the shared channel between the three hooks.
        # If it is missing/not a dict we cannot hand the span to the other
        # hooks, so skip instrumentation rather than leak an open span.
        run_context_metadata = ctx.metadata
        if not isinstance(run_context_metadata, dict):
            return request_context

        span = ai_client_span(
            messages=request_context.messages,
            agent=None,
            model=request_context.model,
            model_settings=request_context.model_settings,
        )

        run_context_metadata["_sentry_span"] = span
        span.__enter__()

        return request_context

    @hooks.on.after_model_request  # type: ignore
    async def on_response(
        ctx: "RunContext[None]",
        *,
        request_context: "ModelRequestContext",
        response: "ModelResponse",
    ) -> "ModelResponse":
        run_context_metadata = ctx.metadata
        if not isinstance(run_context_metadata, dict):
            return response

        # pop() so retries within the same run start from a clean slate.
        span = run_context_metadata.pop("_sentry_span", None)
        if span is None:
            return response

        update_ai_client_span(span, response)
        span.__exit__(None, None, None)

        return response

    @hooks.on.model_request_error  # type: ignore
    async def on_error(
        ctx: "RunContext[None]",
        *,
        request_context: "ModelRequestContext",
        error: "Exception",
    ) -> "ModelResponse":
        # Always re-raise: this hook only closes the span, it must not
        # swallow the model-call error.
        run_context_metadata = ctx.metadata

        if not isinstance(run_context_metadata, dict):
            raise error

        span = run_context_metadata.pop("_sentry_span", None)
        if span is None:
            raise error

        # Guard span teardown so an SDK-internal failure cannot mask the
        # original model error.
        with capture_internal_exceptions():
            span.__exit__(type(error), error, error.__traceback__)

        raise error

    original_init = Agent.__init__

    @functools.wraps(original_init)
    def patched_init(self: "Agent[Any, Any]", *args: "Any", **kwargs: "Any") -> None:
        # Append (never replace) our hooks to any user-supplied capabilities.
        caps = list(kwargs.get("capabilities") or [])
        caps.append(hooks)
        kwargs["capabilities"] = caps

        metadata = kwargs.get("metadata")
        if metadata is None:
            # Seed with a non-empty dict: a non-empty dictionary is required
            # for the metadata object to be kept as a shared reference
            # between hooks. This matches the seeding done in the run/
            # run_stream wrappers ({"_sentry_span": None}); the previous
            # empty-dict seed violated that requirement.
            kwargs["metadata"] = {"_sentry_span": None}

        return original_init(self, *args, **kwargs)

    Agent.__init__ = patched_init
112+
17113

18114
class PydanticAIIntegration(Integration):
115+
"""
116+
Typical interaction with the library:
117+
1. The user creates an Agent instance with configuration, including system instructions sent to every model call.
118+
2. The user calls `Agent.run()` or `Agent.run_stream()` to start an agent run. The latter can be used to incrementally receive progress.
119+
- Each run invocation has `RunContext` objects that are passed to the library hooks.
120+
3. In a loop, the agent repeatedly calls the model, maintaining a conversation history that includes previous messages and tool results, which is passed to each call.
121+
122+
Internally, Pydantic AI maintains an execution graph in which ModelRequestNode are responsible for model calls, including retries.
123+
Hooks using the decorators provided by `pydantic_ai.capabilities` create and manage spans for model calls when these hooks are available (newer library versions).
124+
The span is created in `on_request` and stored in the metadata of the `RunContext` object shared with `on_response` and `on_error`.
125+
126+
The metadata dictionary on the RunContext instance is initialized with `{"_sentry_span": None}` in the `_create_run_wrapper()` and `_create_streaming_wrapper()` wrappers that
127+
instrument `Agent.run()` and `Agent.run_stream()`, respectively. A non-empty dictionary is required for the metadata object to be a shared reference between hooks.
128+
"""
129+
19130
identifier = "pydantic_ai"
20131
origin = f"auto.ai.{identifier}"
132+
are_request_hooks_available = True
21133

22134
def __init__(
23135
self, include_prompts: bool = True, handled_tool_call_exceptions: bool = True
@@ -45,6 +157,18 @@ def setup_once() -> None:
45157
- Tool executions
46158
"""
47159
_patch_agent_run()
48-
_patch_graph_nodes()
49-
_patch_model_request()
50160
_patch_tool_execution()
161+
162+
try:
163+
from pydantic_ai.capabilities import Hooks
164+
except ImportError:
165+
Hooks = None
166+
PydanticAIIntegration.are_request_hooks_available = False
167+
168+
if Hooks is None:
169+
_patch_graph_nodes()
170+
_patch_model_request()
171+
return
172+
173+
hooks = Hooks()
174+
register_hooks(hooks)

sentry_sdk/integrations/pydantic_ai/patches/agent_run.py

Lines changed: 16 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -96,6 +96,9 @@ def _create_run_wrapper(
9696
original_func: The original run method
9797
is_streaming: Whether this is a streaming method (for future use)
9898
"""
99+
from sentry_sdk.integrations.pydantic_ai import (
100+
PydanticAIIntegration,
101+
) # Required to avoid circular import
99102

100103
@wraps(original_func)
101104
async def wrapper(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
@@ -107,6 +110,11 @@ async def wrapper(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
107110
model = kwargs.get("model")
108111
model_settings = kwargs.get("model_settings")
109112

113+
if PydanticAIIntegration.are_request_hooks_available:
114+
metadata = kwargs.get("metadata")
115+
if metadata is None:
116+
kwargs["metadata"] = {"_sentry_span": None}
117+
110118
# Create invoke_agent span
111119
with invoke_agent_span(
112120
user_prompt, self, model, model_settings, is_streaming
@@ -140,6 +148,9 @@ def _create_streaming_wrapper(
140148
"""
141149
Wraps run_stream method that returns an async context manager.
142150
"""
151+
from sentry_sdk.integrations.pydantic_ai import (
152+
PydanticAIIntegration,
153+
) # Required to avoid circular import
143154

144155
@wraps(original_func)
145156
def wrapper(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
@@ -148,6 +159,11 @@ def wrapper(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
148159
model = kwargs.get("model")
149160
model_settings = kwargs.get("model_settings")
150161

162+
if PydanticAIIntegration.are_request_hooks_available:
163+
metadata = kwargs.get("metadata")
164+
if metadata is None:
165+
kwargs["metadata"] = {"_sentry_span": None}
166+
151167
# Call original function to get the context manager
152168
original_ctx_manager = original_func(self, *args, **kwargs)
153169

0 commit comments

Comments
 (0)