1515 gen_ai_attributes as GenAIAttributes ,
1616)
1717from opentelemetry .util .genai .handler import TelemetryHandler
18- from opentelemetry .util .genai .types import (
19- Error ,
20- LLMInvocation , # TODO: migrate to InferenceInvocation
21- )
22- from opentelemetry .util .genai .utils import (
23- should_capture_content_on_spans_in_experimental_mode ,
24- )
18+ from opentelemetry .util .genai .invocation import InferenceInvocation
2519
2620from .messages_extractors import (
2721 extract_params ,
2822 get_input_messages ,
2923 get_llm_request_attributes ,
24+ get_server_address_and_port ,
3025 get_system_instruction ,
3126)
3227from .wrappers import (
28+ MessagesStreamManagerWrapper ,
3329 MessagesStreamWrapper ,
3430 MessageWrapper ,
3531)
3632
3733if TYPE_CHECKING :
34+ from anthropic .lib .streaming ._messages import ( # pylint: disable=no-name-in-module
35+ MessageStreamManager ,
36+ )
3837 from anthropic .resources .messages import Messages
3938 from anthropic .types import RawMessageStreamEvent
4039
@@ -48,73 +47,109 @@ def messages_create(
4847) -> Callable [
4948 ...,
5049 Union [
51- " AnthropicMessage" ,
52- " AnthropicStream[RawMessageStreamEvent]" ,
50+ AnthropicMessage ,
51+ AnthropicStream [RawMessageStreamEvent ],
5352 MessagesStreamWrapper [None ],
5453 ],
5554]:
5655 """Wrap the `create` method of the `Messages` class to trace it."""
57- capture_content = should_capture_content_on_spans_in_experimental_mode ()
56+ capture_content = handler . should_capture_content ()
5857
5958 def traced_method (
6059 wrapped : Callable [
6160 ...,
6261 Union [
63- " AnthropicMessage" ,
64- " AnthropicStream[RawMessageStreamEvent]" ,
62+ AnthropicMessage ,
63+ AnthropicStream [RawMessageStreamEvent ],
6564 ],
6665 ],
67- instance : " Messages" ,
66+ instance : Messages ,
6867 args : tuple [Any , ...],
6968 kwargs : dict [str , Any ],
7069 ) -> Union [
71- " AnthropicMessage" ,
72- " AnthropicStream[RawMessageStreamEvent]" ,
70+ AnthropicMessage ,
71+ AnthropicStream [RawMessageStreamEvent ],
7372 MessagesStreamWrapper [None ],
7473 ]:
75- params = extract_params (* args , ** kwargs )
76- attributes = get_llm_request_attributes (params , instance )
77- request_model_attribute = attributes .get (
78- GenAIAttributes .GEN_AI_REQUEST_MODEL
79- )
80- request_model = (
81- request_model_attribute
82- if isinstance (request_model_attribute , str )
83- else params .model
74+ invocation = _create_invocation (
75+ handler , instance , args , kwargs , capture_content
8476 )
85-
86- invocation = LLMInvocation (
87- request_model = request_model ,
88- provider = ANTHROPIC ,
89- input_messages = get_input_messages (params .messages )
90- if capture_content
91- else [],
92- system_instruction = get_system_instruction (params .system )
93- if capture_content
94- else [],
95- attributes = attributes ,
96- )
97-
98- # Use manual lifecycle management for both streaming and non-streaming
99- handler .start_llm (invocation )
10077 try :
10178 result = wrapped (* args , ** kwargs )
10279 if isinstance (result , AnthropicStream ):
10380 return MessagesStreamWrapper (
104- result , handler , invocation , capture_content
81+ result , invocation , capture_content
10582 )
10683
10784 wrapper = MessageWrapper (result , capture_content )
10885 wrapper .extract_into (invocation )
109- handler . stop_llm ( invocation )
86+ invocation . stop ( )
11087 return wrapper .message
11188 except Exception as exc :
112- handler .fail_llm (
113- invocation , Error (message = str (exc ), type = type (exc ))
114- )
89+ invocation .fail (exc )
11590 raise
11691
11792 return cast (
11893 'Callable[..., Union["AnthropicMessage", "AnthropicStream[RawMessageStreamEvent]", MessagesStreamWrapper[None]]]' ,
11994 traced_method ,
12095 )
96+
97+
def _create_invocation(
    handler: TelemetryHandler,
    instance: Messages,
    args: tuple[Any, ...],
    kwargs: dict[str, Any],
    capture_content: bool,
) -> InferenceInvocation:
    """Start and return an inference invocation populated from the request.

    The request model is taken from the extracted span attributes when it is
    present as a string; otherwise it falls back to the raw ``params.model``
    value. Input messages and the system instruction are attached only when
    content capture is enabled.
    """
    params = extract_params(*args, **kwargs)
    attributes = get_llm_request_attributes(params, instance)

    model_attr = attributes.get(GenAIAttributes.GEN_AI_REQUEST_MODEL)
    if isinstance(model_attr, str):
        request_model = model_attr
    else:
        request_model = params.model

    address, port = get_server_address_and_port(instance)
    invocation = handler.start_inference(
        provider=ANTHROPIC,
        request_model=request_model,
        server_address=address,
        server_port=port,
    )

    if capture_content:
        invocation.input_messages = get_input_messages(params.messages)
        invocation.system_instruction = get_system_instruction(params.system)
    else:
        invocation.input_messages = []
        invocation.system_instruction = []
    invocation.attributes = attributes
    return invocation
131+
132+
def messages_stream(
    handler: TelemetryHandler,
) -> Callable[..., MessagesStreamManagerWrapper[Any]]:
    """Wrap the sync `stream` method of the `Messages` class."""
    capture_content = handler.should_capture_content()

    def traced_method(
        wrapped: Callable[..., MessageStreamManager],
        instance: Messages,
        args: tuple[Any, ...],
        kwargs: dict[str, Any],
    ) -> MessagesStreamManagerWrapper[Any]:
        def invocation_factory() -> InferenceInvocation:
            # Deferred so the invocation is created only when the stream
            # manager actually begins the request.
            return _create_invocation(
                handler, instance, args, kwargs, capture_content
            )

        stream_manager = wrapped(*args, **kwargs)
        return MessagesStreamManagerWrapper(
            stream_manager, invocation_factory, capture_content
        )

    return cast(
        "Callable[..., MessagesStreamManagerWrapper[Any]]", traced_method
    )
0 commit comments