Skip to content

Commit 201eccd

Browse files
authored
fix(core): Standardize Vercel AI span descriptions to align with GenAI semantic conventions (#19624)
- Standardize invoke_agent pipeline span descriptions to use `invoke_agent` (with optional `functionId` suffix) instead of Vercel SDK function names like `generateText` or `generateObject`. This aligns with how other AI integrations (e.g. LangGraph) name their agent spans. - Unify all `.do*` content generation span descriptions under a single `generate_content` prefix (e.g. `generate_content mock-model-id`) instead of using individual prefixes like `generate_text`, `stream_text`, `generate_object`, `stream_object`. - Remove `addOriginToSpan` helper and inline the `setAttribute` call directly. Closes #19625 (added automatically)
1 parent fb1b7ba commit 201eccd

File tree

6 files changed

+71
-80
lines changed

6 files changed

+71
-80
lines changed

dev-packages/node-integration-tests/suites/tracing/vercelai/test-generate-object.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@ describe('Vercel AI integration - generateObject', () => {
2828
'sentry.op': 'gen_ai.invoke_agent',
2929
'sentry.origin': 'auto.vercelai.otel',
3030
}),
31-
description: 'generateObject',
31+
description: 'invoke_agent',
3232
op: 'gen_ai.invoke_agent',
3333
origin: 'auto.vercelai.otel',
3434
status: 'ok',
@@ -51,7 +51,7 @@ describe('Vercel AI integration - generateObject', () => {
5151
'gen_ai.usage.output_tokens': 25,
5252
'gen_ai.usage.total_tokens': 40,
5353
}),
54-
description: 'generate_object mock-model-id',
54+
description: 'generate_content mock-model-id',
5555
op: 'gen_ai.generate_object',
5656
origin: 'auto.vercelai.otel',
5757
status: 'ok',

dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts

Lines changed: 17 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ describe('Vercel AI integration', () => {
5252
'vercel.ai.settings.maxSteps': 1,
5353
'vercel.ai.streaming': false,
5454
},
55-
description: 'generateText',
55+
description: 'invoke_agent',
5656
op: 'gen_ai.invoke_agent',
5757
origin: 'auto.vercelai.otel',
5858
status: 'ok',
@@ -81,7 +81,7 @@ describe('Vercel AI integration', () => {
8181
'vercel.ai.settings.maxRetries': 2,
8282
'vercel.ai.streaming': false,
8383
},
84-
description: 'generate_text mock-model-id',
84+
description: 'generate_content mock-model-id',
8585
op: 'gen_ai.generate_text',
8686
origin: 'auto.vercelai.otel',
8787
status: 'ok',
@@ -109,7 +109,7 @@ describe('Vercel AI integration', () => {
109109
'vercel.ai.settings.maxSteps': 1,
110110
'vercel.ai.streaming': false,
111111
},
112-
description: 'generateText',
112+
description: 'invoke_agent',
113113
op: 'gen_ai.invoke_agent',
114114
origin: 'auto.vercelai.otel',
115115
status: 'ok',
@@ -142,7 +142,7 @@ describe('Vercel AI integration', () => {
142142
'vercel.ai.settings.maxRetries': 2,
143143
'vercel.ai.streaming': false,
144144
},
145-
description: 'generate_text mock-model-id',
145+
description: 'generate_content mock-model-id',
146146
op: 'gen_ai.generate_text',
147147
origin: 'auto.vercelai.otel',
148148
status: 'ok',
@@ -166,7 +166,7 @@ describe('Vercel AI integration', () => {
166166
'vercel.ai.settings.maxSteps': 1,
167167
'vercel.ai.streaming': false,
168168
},
169-
description: 'generateText',
169+
description: 'invoke_agent',
170170
op: 'gen_ai.invoke_agent',
171171
origin: 'auto.vercelai.otel',
172172
status: 'ok',
@@ -195,7 +195,7 @@ describe('Vercel AI integration', () => {
195195
'vercel.ai.settings.maxRetries': 2,
196196
'vercel.ai.streaming': false,
197197
},
198-
description: 'generate_text mock-model-id',
198+
description: 'generate_content mock-model-id',
199199
op: 'gen_ai.generate_text',
200200
origin: 'auto.vercelai.otel',
201201
status: 'ok',
@@ -248,7 +248,7 @@ describe('Vercel AI integration', () => {
248248
'vercel.ai.settings.maxSteps': 1,
249249
'vercel.ai.streaming': false,
250250
},
251-
description: 'generateText',
251+
description: 'invoke_agent',
252252
op: 'gen_ai.invoke_agent',
253253
origin: 'auto.vercelai.otel',
254254
status: 'ok',
@@ -287,7 +287,7 @@ describe('Vercel AI integration', () => {
287287
'vercel.ai.settings.maxRetries': 2,
288288
'vercel.ai.streaming': false,
289289
},
290-
description: 'generate_text mock-model-id',
290+
description: 'generate_content mock-model-id',
291291
op: 'gen_ai.generate_text',
292292
origin: 'auto.vercelai.otel',
293293
status: 'ok',
@@ -320,7 +320,7 @@ describe('Vercel AI integration', () => {
320320
'vercel.ai.settings.maxSteps': 1,
321321
'vercel.ai.streaming': false,
322322
},
323-
description: 'generateText',
323+
description: 'invoke_agent',
324324
op: 'gen_ai.invoke_agent',
325325
origin: 'auto.vercelai.otel',
326326
status: 'ok',
@@ -358,7 +358,7 @@ describe('Vercel AI integration', () => {
358358
'vercel.ai.settings.maxRetries': 2,
359359
'vercel.ai.streaming': false,
360360
},
361-
description: 'generate_text mock-model-id',
361+
description: 'generate_content mock-model-id',
362362
op: 'gen_ai.generate_text',
363363
origin: 'auto.vercelai.otel',
364364
status: 'ok',
@@ -392,7 +392,7 @@ describe('Vercel AI integration', () => {
392392
'vercel.ai.settings.maxSteps': 1,
393393
'vercel.ai.streaming': false,
394394
},
395-
description: 'generateText',
395+
description: 'invoke_agent',
396396
op: 'gen_ai.invoke_agent',
397397
origin: 'auto.vercelai.otel',
398398
status: 'ok',
@@ -433,7 +433,7 @@ describe('Vercel AI integration', () => {
433433
'vercel.ai.settings.maxRetries': 2,
434434
'vercel.ai.streaming': false,
435435
},
436-
description: 'generate_text mock-model-id',
436+
description: 'generate_content mock-model-id',
437437
op: 'gen_ai.generate_text',
438438
origin: 'auto.vercelai.otel',
439439
status: 'ok',
@@ -503,7 +503,7 @@ describe('Vercel AI integration', () => {
503503
'vercel.ai.settings.maxSteps': 1,
504504
'vercel.ai.streaming': false,
505505
},
506-
description: 'generateText',
506+
description: 'invoke_agent',
507507
op: 'gen_ai.invoke_agent',
508508
origin: 'auto.vercelai.otel',
509509
status: 'internal_error',
@@ -531,7 +531,7 @@ describe('Vercel AI integration', () => {
531531
'vercel.ai.settings.maxRetries': 2,
532532
'vercel.ai.streaming': false,
533533
},
534-
description: 'generate_text mock-model-id',
534+
description: 'generate_content mock-model-id',
535535
op: 'gen_ai.generate_text',
536536
origin: 'auto.vercelai.otel',
537537
status: 'ok',
@@ -623,7 +623,7 @@ describe('Vercel AI integration', () => {
623623
'vercel.ai.settings.maxSteps': 1,
624624
'vercel.ai.streaming': false,
625625
},
626-
description: 'generateText',
626+
description: 'invoke_agent',
627627
op: 'gen_ai.invoke_agent',
628628
origin: 'auto.vercelai.otel',
629629
status: 'internal_error',
@@ -651,7 +651,7 @@ describe('Vercel AI integration', () => {
651651
'vercel.ai.settings.maxRetries': 2,
652652
'vercel.ai.streaming': false,
653653
},
654-
description: 'generate_text mock-model-id',
654+
description: 'generate_content mock-model-id',
655655
op: 'gen_ai.generate_text',
656656
origin: 'auto.vercelai.otel',
657657
status: 'ok',
@@ -735,7 +735,7 @@ describe('Vercel AI integration', () => {
735735
spans: expect.arrayContaining([
736736
// The generateText span should have the correct op even though model ID was not available at span start
737737
expect.objectContaining({
738-
description: 'generateText',
738+
description: 'invoke_agent',
739739
op: 'gen_ai.invoke_agent',
740740
origin: 'auto.vercelai.otel',
741741
status: 'ok',

dev-packages/node-integration-tests/suites/tracing/vercelai/v5/test.ts

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ describe('Vercel AI integration (V5)', () => {
5050
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
5151
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
5252
},
53-
description: 'generateText',
53+
description: 'invoke_agent',
5454
op: 'gen_ai.invoke_agent',
5555
origin: 'auto.vercelai.otel',
5656
status: 'ok',
@@ -79,7 +79,7 @@ describe('Vercel AI integration (V5)', () => {
7979
[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
8080
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
8181
},
82-
description: 'generate_text mock-model-id',
82+
description: 'generate_content mock-model-id',
8383
op: 'gen_ai.generate_text',
8484
origin: 'auto.vercelai.otel',
8585
status: 'ok',
@@ -106,7 +106,7 @@ describe('Vercel AI integration (V5)', () => {
106106
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
107107
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
108108
},
109-
description: 'generateText',
109+
description: 'invoke_agent',
110110
op: 'gen_ai.invoke_agent',
111111
origin: 'auto.vercelai.otel',
112112
status: 'ok',
@@ -138,7 +138,7 @@ describe('Vercel AI integration (V5)', () => {
138138
[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
139139
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
140140
},
141-
description: 'generate_text mock-model-id',
141+
description: 'generate_content mock-model-id',
142142
op: 'gen_ai.generate_text',
143143
origin: 'auto.vercelai.otel',
144144
status: 'ok',
@@ -161,7 +161,7 @@ describe('Vercel AI integration (V5)', () => {
161161
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
162162
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
163163
},
164-
description: 'generateText',
164+
description: 'invoke_agent',
165165
op: 'gen_ai.invoke_agent',
166166
origin: 'auto.vercelai.otel',
167167
status: 'ok',
@@ -190,7 +190,7 @@ describe('Vercel AI integration (V5)', () => {
190190
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
191191
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
192192
},
193-
description: 'generate_text mock-model-id',
193+
description: 'generate_content mock-model-id',
194194
op: 'gen_ai.generate_text',
195195
origin: 'auto.vercelai.otel',
196196
status: 'ok',
@@ -242,7 +242,7 @@ describe('Vercel AI integration (V5)', () => {
242242
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
243243
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
244244
},
245-
description: 'generateText',
245+
description: 'invoke_agent',
246246
op: 'gen_ai.invoke_agent',
247247
origin: 'auto.vercelai.otel',
248248
status: 'ok',
@@ -275,7 +275,7 @@ describe('Vercel AI integration (V5)', () => {
275275
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
276276
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
277277
},
278-
description: 'generate_text mock-model-id',
278+
description: 'generate_content mock-model-id',
279279
op: 'gen_ai.generate_text',
280280
origin: 'auto.vercelai.otel',
281281
status: 'ok',
@@ -302,7 +302,7 @@ describe('Vercel AI integration (V5)', () => {
302302
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
303303
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
304304
},
305-
description: 'generateText',
305+
description: 'invoke_agent',
306306
op: 'gen_ai.invoke_agent',
307307
origin: 'auto.vercelai.otel',
308308
status: 'ok',
@@ -334,7 +334,7 @@ describe('Vercel AI integration (V5)', () => {
334334
[GEN_AI_RESPONSE_MODEL_ATTRIBUTE]: 'mock-model-id',
335335
[GEN_AI_USAGE_TOTAL_TOKENS_ATTRIBUTE]: 30,
336336
},
337-
description: 'generate_text mock-model-id',
337+
description: 'generate_content mock-model-id',
338338
op: 'gen_ai.generate_text',
339339
origin: 'auto.vercelai.otel',
340340
status: 'ok',
@@ -361,7 +361,7 @@ describe('Vercel AI integration (V5)', () => {
361361
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.invoke_agent',
362362
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
363363
},
364-
description: 'generateText',
364+
description: 'invoke_agent',
365365
op: 'gen_ai.invoke_agent',
366366
origin: 'auto.vercelai.otel',
367367
status: 'ok',
@@ -396,7 +396,7 @@ describe('Vercel AI integration (V5)', () => {
396396
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
397397
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
398398
}),
399-
description: 'generate_text mock-model-id',
399+
description: 'generate_content mock-model-id',
400400
op: 'gen_ai.generate_text',
401401
origin: 'auto.vercelai.otel',
402402
status: 'ok',
@@ -480,7 +480,7 @@ describe('Vercel AI integration (V5)', () => {
480480
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
481481
'vercel.ai.response.finishReason': 'tool-calls',
482482
},
483-
description: 'generateText',
483+
description: 'invoke_agent',
484484
op: 'gen_ai.invoke_agent',
485485
origin: 'auto.vercelai.otel',
486486
}),
@@ -507,7 +507,7 @@ describe('Vercel AI integration (V5)', () => {
507507
[SEMANTIC_ATTRIBUTE_SENTRY_OP]: 'gen_ai.generate_text',
508508
[SEMANTIC_ATTRIBUTE_SENTRY_ORIGIN]: 'auto.vercelai.otel',
509509
},
510-
description: 'generate_text mock-model-id',
510+
description: 'generate_content mock-model-id',
511511
op: 'gen_ai.generate_text',
512512
origin: 'auto.vercelai.otel',
513513
status: 'ok',

0 commit comments

Comments (0)