Skip to content

Commit 7e5363c

Browse files
ericapisani authored and claude committed
feat(anthropic): Set gen_ai.response.id span attribute
Extract the response ID from Anthropic API responses and set it as the gen_ai.response.id span attribute. For non-streaming responses, read result.id directly. For streaming responses, capture event.message.id from the message_start event. Refs PY-2137 Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent b1ddc5d commit 7e5363c

File tree

2 files changed

+24
-2
lines changed

2 files changed

+24
-2
lines changed

sentry_sdk/integrations/anthropic.py

Lines changed: 18 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -126,7 +126,8 @@ def _collect_ai_data(
126126
model: "str | None",
127127
usage: "_RecordedUsage",
128128
content_blocks: "list[str]",
129-
) -> "tuple[str | None, _RecordedUsage, list[str]]":
129+
response_id: "str | None" = None,
130+
) -> "tuple[str | None, _RecordedUsage, list[str], str | None]":
130131
"""
131132
Collect model information, token usage, and collect content blocks from the AI streaming response.
132133
"""
@@ -146,6 +147,7 @@ def _collect_ai_data(
146147
# https://github.com/anthropics/anthropic-sdk-python/blob/9c485f6966e10ae0ea9eabb3a921d2ea8145a25b/src/anthropic/lib/streaming/_messages.py#L433-L518
147148
if event.type == "message_start":
148149
model = event.message.model or model
150+
response_id = getattr(event.message, "id", None) or response_id
149151

150152
incoming_usage = event.message.usage
151153
usage.output_tokens = incoming_usage.output_tokens
@@ -162,6 +164,7 @@ def _collect_ai_data(
162164
model,
163165
usage,
164166
content_blocks,
167+
response_id,
165168
)
166169

167170
# Counterintuitive, but message_delta contains cumulative token counts :)
@@ -190,12 +193,14 @@ def _collect_ai_data(
190193
model,
191194
usage,
192195
content_blocks,
196+
response_id,
193197
)
194198

195199
return (
196200
model,
197201
usage,
198202
content_blocks,
203+
response_id,
199204
)
200205

201206

@@ -348,10 +353,13 @@ def _set_output_data(
348353
cache_write_input_tokens: "int | None",
349354
content_blocks: "list[Any]",
350355
finish_span: bool = False,
356+
response_id: "str | None" = None,
351357
) -> None:
352358
"""
353359
Set output data for the span based on the AI response."""
354360
span.set_data(SPANDATA.GEN_AI_RESPONSE_MODEL, model)
361+
if response_id is not None:
362+
span.set_data(SPANDATA.GEN_AI_RESPONSE_ID, response_id)
355363
if should_send_default_pii() and integration.include_prompts:
356364
output_messages: "dict[str, list[Any]]" = {
357365
"response": [],
@@ -443,6 +451,7 @@ def _sentry_patched_create_common(f: "Any", *args: "Any", **kwargs: "Any") -> "A
443451
cache_write_input_tokens=cache_write_input_tokens,
444452
content_blocks=content_blocks,
445453
finish_span=True,
454+
response_id=getattr(result, "id", None),
446455
)
447456

448457
# Streaming response
@@ -453,17 +462,20 @@ def new_iterator() -> "Iterator[MessageStreamEvent]":
453462
model = None
454463
usage = _RecordedUsage()
455464
content_blocks: "list[str]" = []
465+
response_id = None
456466

457467
for event in old_iterator:
458468
(
459469
model,
460470
usage,
461471
content_blocks,
472+
response_id,
462473
) = _collect_ai_data(
463474
event,
464475
model,
465476
usage,
466477
content_blocks,
478+
response_id,
467479
)
468480
yield event
469481

@@ -485,23 +497,27 @@ def new_iterator() -> "Iterator[MessageStreamEvent]":
485497
cache_write_input_tokens=usage.cache_write_input_tokens,
486498
content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
487499
finish_span=True,
500+
response_id=response_id,
488501
)
489502

490503
async def new_iterator_async() -> "AsyncIterator[MessageStreamEvent]":
491504
model = None
492505
usage = _RecordedUsage()
493506
content_blocks: "list[str]" = []
507+
response_id = None
494508

495509
async for event in old_iterator:
496510
(
497511
model,
498512
usage,
499513
content_blocks,
514+
response_id,
500515
) = _collect_ai_data(
501516
event,
502517
model,
503518
usage,
504519
content_blocks,
520+
response_id,
505521
)
506522
yield event
507523

@@ -523,6 +539,7 @@ async def new_iterator_async() -> "AsyncIterator[MessageStreamEvent]":
523539
cache_write_input_tokens=usage.cache_write_input_tokens,
524540
content_blocks=[{"text": "".join(content_blocks), "type": "text"}],
525541
finish_span=True,
542+
response_id=response_id,
526543
)
527544

528545
if str(type(result._iterator)) == "<class 'async_generator'>":

tests/integrations/anthropic/test_anthropic.py

Lines changed: 6 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -134,6 +134,7 @@ def test_nonstreaming_create_message(
134134
assert span["data"][SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS] == 20
135135
assert span["data"][SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS] == 30
136136
assert span["data"][SPANDATA.GEN_AI_RESPONSE_STREAMING] is False
137+
assert span["data"][SPANDATA.GEN_AI_RESPONSE_ID] == "id"
137138

138139

139140
@pytest.mark.asyncio
@@ -204,6 +205,7 @@ async def test_nonstreaming_create_message_async(
204205
assert span["data"][SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS] == 20
205206
assert span["data"][SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS] == 30
206207
assert span["data"][SPANDATA.GEN_AI_RESPONSE_STREAMING] is False
208+
assert span["data"][SPANDATA.GEN_AI_RESPONSE_ID] == "id"
207209

208210

209211
@pytest.mark.parametrize(
@@ -306,6 +308,7 @@ def test_streaming_create_message(
306308
assert span["data"][SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS] == 10
307309
assert span["data"][SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS] == 20
308310
assert span["data"][SPANDATA.GEN_AI_RESPONSE_STREAMING] is True
311+
assert span["data"][SPANDATA.GEN_AI_RESPONSE_ID] == "id"
309312

310313

311314
@pytest.mark.asyncio
@@ -411,6 +414,7 @@ async def test_streaming_create_message_async(
411414
assert span["data"][SPANDATA.GEN_AI_USAGE_OUTPUT_TOKENS] == 10
412415
assert span["data"][SPANDATA.GEN_AI_USAGE_TOTAL_TOKENS] == 20
413416
assert span["data"][SPANDATA.GEN_AI_RESPONSE_STREAMING] is True
417+
assert span["data"][SPANDATA.GEN_AI_RESPONSE_ID] == "id"
414418

415419

416420
@pytest.mark.skipif(
@@ -852,13 +856,14 @@ def test_collect_ai_data_with_input_json_delta():
852856

853857
content_blocks = []
854858

855-
model, new_usage, new_content_blocks = _collect_ai_data(
859+
model, new_usage, new_content_blocks, response_id = _collect_ai_data(
856860
event, model, usage, content_blocks
857861
)
858862
assert model is None
859863
assert new_usage.input_tokens == usage.input_tokens
860864
assert new_usage.output_tokens == usage.output_tokens
861865
assert new_content_blocks == ["test"]
866+
assert response_id is None
862867

863868

864869
@pytest.mark.skipif(

0 commit comments

Comments (0)