Skip to content

Commit 5425ffb

Browse files
authored
fix(core): Set op on ended Vercel AI spans (#18601)
Ensures Vercel AI span op is always set correctly at span start, regardless of whether the model ID is available. Changes: - Removed the model ID check gate before calling processGenerateSpan - the op is determined by span name, not model ID - Span name updates (e.g., generate_text gpt-4) are now only applied when model ID exists, avoiding undefined in names - Added integration test for late model ID scenario Closes #18448
1 parent da1c41f commit 5425ffb

5 files changed

Lines changed: 179 additions & 72 deletions

File tree

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,36 @@
1+
import * as Sentry from '@sentry/node';
import { generateText } from 'ai';

// Custom mock model that doesn't set modelId initially (simulates late model ID setting)
// This tests that the op is correctly set even when model ID is not available at span start.
// The span name update (e.g., 'generate_text gpt-4') is skipped when model ID is missing.
class LateModelIdMock {
  specificationVersion = 'v1';
  provider = 'late-model-provider';
  // modelId is intentionally undefined initially to simulate late setting
  modelId = undefined;
  defaultObjectGenerationMode = 'json';

  async doGenerate() {
    // Model ID is only "available" during generation, not at span start
    this.modelId = 'late-mock-model-id';

    return {
      rawCall: { rawPrompt: null, rawSettings: {} },
      finishReason: 'stop',
      usage: { promptTokens: 5, completionTokens: 10 },
      text: 'Response from late model!',
    };
  }
}

// Runs one generateText call inside a wrapping span so the instrumentation
// produces both the pipeline span and the doGenerate span for the test to inspect.
async function run() {
  await Sentry.startSpan({ op: 'function', name: 'main' }, async () => {
    await generateText({
      model: new LateModelIdMock(),
      prompt: 'Test prompt for late model ID',
    });
  });
}

run();

dev-packages/node-integration-tests/suites/tracing/vercelai/test.ts

Lines changed: 36 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -699,4 +699,40 @@ describe('Vercel AI integration', () => {
699699
expect(errorEvent!.contexts!.trace!.span_id).toBe(transactionEvent!.contexts!.trace!.span_id);
700700
});
701701
});
702+
703+
createEsmAndCjsTests(__dirname, 'scenario-late-model-id.mjs', 'instrument.mjs', (createRunner, test) => {
704+
test('sets op correctly even when model ID is not available at span start', async () => {
705+
const expectedTransaction = {
706+
transaction: 'main',
707+
spans: expect.arrayContaining([
708+
// The generateText span should have the correct op even though model ID was not available at span start
709+
expect.objectContaining({
710+
description: 'generateText',
711+
op: 'gen_ai.invoke_agent',
712+
origin: 'auto.vercelai.otel',
713+
status: 'ok',
714+
data: expect.objectContaining({
715+
'sentry.op': 'gen_ai.invoke_agent',
716+
'sentry.origin': 'auto.vercelai.otel',
717+
'gen_ai.operation.name': 'ai.generateText',
718+
}),
719+
}),
720+
// The doGenerate span - name stays as 'generateText.doGenerate' since model ID is missing
721+
expect.objectContaining({
722+
description: 'generateText.doGenerate',
723+
op: 'gen_ai.generate_text',
724+
origin: 'auto.vercelai.otel',
725+
status: 'ok',
726+
data: expect.objectContaining({
727+
'sentry.op': 'gen_ai.generate_text',
728+
'sentry.origin': 'auto.vercelai.otel',
729+
'gen_ai.operation.name': 'ai.generateText.doGenerate',
730+
}),
731+
}),
732+
]),
733+
};
734+
735+
await createRunner().expect({ transaction: expectedTransaction }).start().completed();
736+
});
737+
});
702738
});

packages/core/src/tracing/ai/gen-ai-attributes.ts

Lines changed: 35 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -179,6 +179,41 @@ export const GEN_AI_USAGE_INPUT_TOKENS_CACHED_ATTRIBUTE = 'gen_ai.usage.input_to
179179
*/
180180
export const GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE = 'gen_ai.invoke_agent';
181181

182+
/**
183+
* The span operation name for generating text
184+
*/
185+
export const GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE = 'gen_ai.generate_text';
186+
187+
/**
188+
* The span operation name for streaming text
189+
*/
190+
export const GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE = 'gen_ai.stream_text';
191+
192+
/**
193+
* The span operation name for generating object
194+
*/
195+
export const GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE = 'gen_ai.generate_object';
196+
197+
/**
198+
* The span operation name for streaming object
199+
*/
200+
export const GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE = 'gen_ai.stream_object';
201+
202+
/**
203+
* The span operation name for embedding
204+
*/
205+
export const GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE = 'gen_ai.embed';
206+
207+
/**
208+
* The span operation name for embedding many
209+
*/
210+
export const GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE = 'gen_ai.embed_many';
211+
212+
/**
213+
* The span operation name for executing a tool
214+
*/
215+
export const GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE = 'gen_ai.execute_tool';
216+
182217
// =============================================================================
183218
// OPENAI-SPECIFIC ATTRIBUTES
184219
// =============================================================================

packages/core/src/tracing/vercel-ai/index.ts

Lines changed: 30 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -19,6 +19,7 @@ import {
1919
accumulateTokensForParent,
2020
applyAccumulatedTokens,
2121
convertAvailableToolsToJsonString,
22+
getSpanOpFromName,
2223
requestMessagesFromPrompt,
2324
} from './utils';
2425
import type { ProviderMetadata } from './vercel-ai-attributes';
@@ -64,10 +65,8 @@ function onVercelAiSpanStart(span: Span): void {
6465
return;
6566
}
6667

67-
// The AI model ID must be defined for generate, stream, and embed spans.
68-
// The provider is optional and may not always be present.
69-
const aiModelId = attributes[AI_MODEL_ID_ATTRIBUTE];
70-
if (typeof aiModelId !== 'string' || !aiModelId) {
68+
// Check if this is a Vercel AI span by name pattern.
69+
if (!name.startsWith('ai.')) {
7170
return;
7271
}
7372

@@ -225,76 +224,35 @@ function processGenerateSpan(span: Span, name: string, attributes: SpanAttribute
225224
}
226225
span.setAttribute('ai.streaming', name.includes('stream'));
227226

228-
// Generate Spans
229-
if (name === 'ai.generateText') {
230-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
231-
return;
232-
}
233-
234-
if (name === 'ai.generateText.doGenerate') {
235-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_text');
236-
span.updateName(`generate_text ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
237-
return;
238-
}
239-
240-
if (name === 'ai.streamText') {
241-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
242-
return;
243-
}
244-
245-
if (name === 'ai.streamText.doStream') {
246-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_text');
247-
span.updateName(`stream_text ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
248-
return;
249-
}
250-
251-
if (name === 'ai.generateObject') {
252-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
253-
return;
254-
}
255-
256-
if (name === 'ai.generateObject.doGenerate') {
257-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.generate_object');
258-
span.updateName(`generate_object ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
259-
return;
260-
}
261-
262-
if (name === 'ai.streamObject') {
263-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
264-
return;
265-
}
266-
267-
if (name === 'ai.streamObject.doStream') {
268-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.stream_object');
269-
span.updateName(`stream_object ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
270-
return;
271-
}
272-
273-
if (name === 'ai.embed') {
274-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
275-
return;
276-
}
277-
278-
if (name === 'ai.embed.doEmbed') {
279-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed');
280-
span.updateName(`embed ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
281-
return;
227+
// Set the op based on the span name
228+
const op = getSpanOpFromName(name);
229+
if (op) {
230+
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, op);
282231
}
283232

284-
if (name === 'ai.embedMany') {
285-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.invoke_agent');
286-
return;
287-
}
288-
289-
if (name === 'ai.embedMany.doEmbed') {
290-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'gen_ai.embed_many');
291-
span.updateName(`embed_many ${attributes[AI_MODEL_ID_ATTRIBUTE]}`);
292-
return;
293-
}
294-
295-
if (name.startsWith('ai.stream')) {
296-
span.setAttribute(SEMANTIC_ATTRIBUTE_SENTRY_OP, 'ai.run');
297-
return;
233+
// Update span names for .do* spans to include the model ID (only if model ID exists)
234+
const modelId = attributes[AI_MODEL_ID_ATTRIBUTE];
235+
if (modelId) {
236+
switch (name) {
237+
case 'ai.generateText.doGenerate':
238+
span.updateName(`generate_text ${modelId}`);
239+
break;
240+
case 'ai.streamText.doStream':
241+
span.updateName(`stream_text ${modelId}`);
242+
break;
243+
case 'ai.generateObject.doGenerate':
244+
span.updateName(`generate_object ${modelId}`);
245+
break;
246+
case 'ai.streamObject.doStream':
247+
span.updateName(`stream_object ${modelId}`);
248+
break;
249+
case 'ai.embed.doEmbed':
250+
span.updateName(`embed ${modelId}`);
251+
break;
252+
case 'ai.embedMany.doEmbed':
253+
span.updateName(`embed_many ${modelId}`);
254+
break;
255+
}
298256
}
299257
}
300258

packages/core/src/tracing/vercel-ai/utils.ts

Lines changed: 42 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,15 @@
11
import type { TraceContext } from '../../types-hoist/context';
22
import type { Span, SpanAttributes, SpanJSON } from '../../types-hoist/span';
33
import {
4+
GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE,
5+
GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE,
6+
GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE,
7+
GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE,
8+
GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE,
9+
GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE,
410
GEN_AI_REQUEST_MESSAGES_ATTRIBUTE,
11+
GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE,
12+
GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE,
513
GEN_AI_USAGE_INPUT_TOKENS_ATTRIBUTE,
614
GEN_AI_USAGE_OUTPUT_TOKENS_ATTRIBUTE,
715
} from '../ai/gen-ai-attributes';
@@ -137,3 +145,37 @@ export function requestMessagesFromPrompt(span: Span, attributes: SpanAttributes
137145
if (messages.length) span.setAttribute(GEN_AI_REQUEST_MESSAGES_ATTRIBUTE, getTruncatedJsonString(messages));
138146
}
139147
}
148+
149+
/**
150+
* Maps a Vercel AI span name to the corresponding Sentry op.
151+
*/
152+
export function getSpanOpFromName(name: string): string | undefined {
153+
switch (name) {
154+
case 'ai.generateText':
155+
case 'ai.streamText':
156+
case 'ai.generateObject':
157+
case 'ai.streamObject':
158+
case 'ai.embed':
159+
case 'ai.embedMany':
160+
return GEN_AI_INVOKE_AGENT_OPERATION_ATTRIBUTE;
161+
case 'ai.generateText.doGenerate':
162+
return GEN_AI_GENERATE_TEXT_DO_GENERATE_OPERATION_ATTRIBUTE;
163+
case 'ai.streamText.doStream':
164+
return GEN_AI_STREAM_TEXT_DO_STREAM_OPERATION_ATTRIBUTE;
165+
case 'ai.generateObject.doGenerate':
166+
return GEN_AI_GENERATE_OBJECT_DO_GENERATE_OPERATION_ATTRIBUTE;
167+
case 'ai.streamObject.doStream':
168+
return GEN_AI_STREAM_OBJECT_DO_STREAM_OPERATION_ATTRIBUTE;
169+
case 'ai.embed.doEmbed':
170+
return GEN_AI_EMBED_DO_EMBED_OPERATION_ATTRIBUTE;
171+
case 'ai.embedMany.doEmbed':
172+
return GEN_AI_EMBED_MANY_DO_EMBED_OPERATION_ATTRIBUTE;
173+
case 'ai.toolCall':
174+
return GEN_AI_EXECUTE_TOOL_OPERATION_ATTRIBUTE;
175+
default:
176+
if (name.startsWith('ai.stream')) {
177+
return 'ai.run';
178+
}
179+
return undefined;
180+
}
181+
}

0 commit comments

Comments
 (0)