@@ -64,6 +64,7 @@ async def __call__(self, *args, **kwargs):
6464 content = [TextBlock (type = "text" , text = "Hi, I'm Claude." )],
6565 type = "message" ,
6666 usage = Usage (input_tokens = 10 , output_tokens = 20 ),
67+ stop_reason = "end_turn" ,
6768)
6869
6970
@@ -134,6 +135,7 @@ def test_nonstreaming_create_message(
134135 assert span ["data" ][SPANDATA .GEN_AI_USAGE_OUTPUT_TOKENS ] == 20
135136 assert span ["data" ][SPANDATA .GEN_AI_USAGE_TOTAL_TOKENS ] == 30
136137 assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_STREAMING ] is False
138+ assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_FINISH_REASONS ] == "end_turn"
137139
138140
139141@pytest .mark .asyncio
@@ -204,6 +206,122 @@ async def test_nonstreaming_create_message_async(
204206 assert span ["data" ][SPANDATA .GEN_AI_USAGE_OUTPUT_TOKENS ] == 20
205207 assert span ["data" ][SPANDATA .GEN_AI_USAGE_TOTAL_TOKENS ] == 30
206208 assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_STREAMING ] is False
209+ assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_FINISH_REASONS ] == "end_turn"
210+
211+
def test_streaming_create_message_with_finish_reason(sentry_init, capture_events):
    """The span must carry the stop reason delivered by the final message_delta
    event of a synchronous streaming response."""
    client = Anthropic(api_key="z")

    # Fake SSE stream: message_start -> text block -> message_delta with
    # stop_reason="end_turn", mirroring a real streamed completion.
    fake_stream = Stream(cast_to=None, response=None, client=client)
    fake_stream._iterator = [
        MessageStartEvent(message=EXAMPLE_MESSAGE, type="message_start"),
        ContentBlockStartEvent(
            type="content_block_start",
            index=0,
            content_block=TextBlock(type="text", text=""),
        ),
        ContentBlockDeltaEvent(
            delta=TextDelta(text="Hi!", type="text_delta"),
            index=0,
            type="content_block_delta",
        ),
        ContentBlockStopEvent(type="content_block_stop", index=0),
        MessageDeltaEvent(
            delta=Delta(stop_reason="end_turn"),
            usage=MessageDeltaUsage(output_tokens=10),
            type="message_delta",
        ),
    ]

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=True)],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()
    client.messages._post = mock.Mock(return_value=fake_stream)

    prompt = [{"role": "user", "content": "Hello, Claude"}]

    with start_transaction(name="anthropic"):
        response = client.messages.create(
            max_tokens=1024, messages=prompt, model="model", stream=True
        )
        # Drain the stream so the integration processes every event,
        # including the message_delta that carries the stop reason.
        for _ in response:
            pass

    assert len(events) == 1
    (event,) = events
    (span,) = event["spans"]

    assert span["data"][SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS] == "end_turn"
265+
266+
@pytest.mark.asyncio
async def test_streaming_create_message_with_finish_reason_async(
    sentry_init, capture_events, async_iterator
):
    """Async variant: the span must carry the stop reason delivered by the
    final message_delta event of an async streaming response."""
    client = AsyncAnthropic(api_key="z")

    # Fake async SSE stream ending in a message_delta with
    # stop_reason="end_turn", mirroring a real streamed completion.
    fake_stream = AsyncStream(cast_to=None, response=None, client=client)
    fake_stream._iterator = async_iterator(
        [
            MessageStartEvent(message=EXAMPLE_MESSAGE, type="message_start"),
            ContentBlockStartEvent(
                type="content_block_start",
                index=0,
                content_block=TextBlock(type="text", text=""),
            ),
            ContentBlockDeltaEvent(
                delta=TextDelta(text="Hi!", type="text_delta"),
                index=0,
                type="content_block_delta",
            ),
            ContentBlockStopEvent(type="content_block_stop", index=0),
            MessageDeltaEvent(
                delta=Delta(stop_reason="end_turn"),
                usage=MessageDeltaUsage(output_tokens=10),
                type="message_delta",
            ),
        ]
    )

    sentry_init(
        integrations=[AnthropicIntegration(include_prompts=True)],
        traces_sample_rate=1.0,
        send_default_pii=True,
    )
    events = capture_events()
    client.messages._post = AsyncMock(return_value=fake_stream)

    prompt = [{"role": "user", "content": "Hello, Claude"}]

    with start_transaction(name="anthropic"):
        response = await client.messages.create(
            max_tokens=1024, messages=prompt, model="model", stream=True
        )
        # Drain the stream so the integration processes every event,
        # including the message_delta that carries the stop reason.
        async for _ in response:
            pass

    assert len(events) == 1
    (event,) = events
    (span,) = event["spans"]

    assert span["data"][SPANDATA.GEN_AI_RESPONSE_FINISH_REASONS] == "end_turn"
207325
208326
209327@pytest .mark .parametrize (
@@ -545,6 +663,7 @@ def test_streaming_create_message_with_input_json_delta(
545663 assert span ["data" ][SPANDATA .GEN_AI_USAGE_OUTPUT_TOKENS ] == 41
546664 assert span ["data" ][SPANDATA .GEN_AI_USAGE_TOTAL_TOKENS ] == 407
547665 assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_STREAMING ] is True
666+ assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_FINISH_REASONS ] == "tool_use"
548667
549668
550669@pytest .mark .asyncio
@@ -687,6 +806,7 @@ async def test_streaming_create_message_with_input_json_delta_async(
687806 assert span ["data" ][SPANDATA .GEN_AI_USAGE_OUTPUT_TOKENS ] == 41
688807 assert span ["data" ][SPANDATA .GEN_AI_USAGE_TOTAL_TOKENS ] == 407
689808 assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_STREAMING ] is True
809+ assert span ["data" ][SPANDATA .GEN_AI_RESPONSE_FINISH_REASONS ] == "tool_use"
690810
691811
692812def test_exception_message_create (sentry_init , capture_events ):
0 commit comments