Skip to content

Commit a4e6f39

Browse files
committed
fix(Core): Report well known values in gen_ai.operation.name attribute
1 parent 80b4705 commit a4e6f39

15 files changed

Lines changed: 251 additions & 210 deletions

File tree

dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts

Lines changed: 70 additions & 70 deletions
Large diffs are not rendered by default.

dev-packages/node-integration-tests/suites/tracing/langchain/test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -286,7 +286,7 @@ describe('LangChain integration', () => {
286286
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
287287
const firstAnthropicSpan = spans.find(
288288
span =>
289-
span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
289+
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
290290
);
291291

292292
// Second call: LangChain call
@@ -300,7 +300,7 @@ describe('LangChain integration', () => {
300300
// Count how many Anthropic spans we have - should be exactly 1
301301
const anthropicSpans = spans.filter(
302302
span =>
303-
span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
303+
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
304304
);
305305

306306
// Verify the edge case limitation:

dev-packages/node-integration-tests/suites/tracing/langchain/v1/test.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -336,7 +336,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
336336
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
337337
const firstAnthropicSpan = spans.find(
338338
span =>
339-
span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
339+
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
340340
);
341341

342342
// Second call: LangChain call
@@ -350,7 +350,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
350350
// Count how many Anthropic spans we have - should be exactly 1
351351
const anthropicSpans = spans.filter(
352352
span =>
353-
span.description === 'messages claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
353+
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
354354
);
355355

356356
// Verify the edge case limitation:

dev-packages/node-integration-tests/suites/tracing/openai/openai-tool-calls/test.ts

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -117,8 +117,8 @@ describe('OpenAI Tool Calls integration', () => {
117117
// Third span - responses API with tools (non-streaming)
118118
expect.objectContaining({
119119
data: {
120-
'gen_ai.operation.name': 'responses',
121-
'sentry.op': 'gen_ai.responses',
120+
'gen_ai.operation.name': 'chat',
121+
'sentry.op': 'gen_ai.chat',
122122
'sentry.origin': 'auto.ai.openai',
123123
'gen_ai.system': 'openai',
124124
'gen_ai.request.model': 'gpt-4',
@@ -135,16 +135,16 @@ describe('OpenAI Tool Calls integration', () => {
135135
'openai.usage.completion_tokens': 12,
136136
'openai.usage.prompt_tokens': 8,
137137
},
138-
description: 'responses gpt-4',
139-
op: 'gen_ai.responses',
138+
description: 'chat gpt-4',
139+
op: 'gen_ai.chat',
140140
origin: 'auto.ai.openai',
141141
status: 'ok',
142142
}),
143143
// Fourth span - responses API with tools and streaming
144144
expect.objectContaining({
145145
data: {
146-
'gen_ai.operation.name': 'responses',
147-
'sentry.op': 'gen_ai.responses',
146+
'gen_ai.operation.name': 'chat',
147+
'sentry.op': 'gen_ai.chat',
148148
'sentry.origin': 'auto.ai.openai',
149149
'gen_ai.system': 'openai',
150150
'gen_ai.request.model': 'gpt-4',
@@ -163,8 +163,8 @@ describe('OpenAI Tool Calls integration', () => {
163163
'openai.usage.completion_tokens': 12,
164164
'openai.usage.prompt_tokens': 8,
165165
},
166-
description: 'responses gpt-4 stream-response',
167-
op: 'gen_ai.responses',
166+
description: 'chat gpt-4 stream-response',
167+
op: 'gen_ai.chat',
168168
origin: 'auto.ai.openai',
169169
status: 'ok',
170170
}),
@@ -238,8 +238,8 @@ describe('OpenAI Tool Calls integration', () => {
238238
// Third span - responses API with tools (non-streaming) with PII
239239
expect.objectContaining({
240240
data: {
241-
'gen_ai.operation.name': 'responses',
242-
'sentry.op': 'gen_ai.responses',
241+
'gen_ai.operation.name': 'chat',
242+
'sentry.op': 'gen_ai.chat',
243243
'sentry.origin': 'auto.ai.openai',
244244
'gen_ai.system': 'openai',
245245
'gen_ai.request.model': 'gpt-4',
@@ -259,16 +259,16 @@ describe('OpenAI Tool Calls integration', () => {
259259
'openai.usage.completion_tokens': 12,
260260
'openai.usage.prompt_tokens': 8,
261261
},
262-
description: 'responses gpt-4',
263-
op: 'gen_ai.responses',
262+
description: 'chat gpt-4',
263+
op: 'gen_ai.chat',
264264
origin: 'auto.ai.openai',
265265
status: 'ok',
266266
}),
267267
// Fourth span - responses API with tools and streaming with PII
268268
expect.objectContaining({
269269
data: {
270-
'gen_ai.operation.name': 'responses',
271-
'sentry.op': 'gen_ai.responses',
270+
'gen_ai.operation.name': 'chat',
271+
'sentry.op': 'gen_ai.chat',
272272
'sentry.origin': 'auto.ai.openai',
273273
'gen_ai.system': 'openai',
274274
'gen_ai.request.model': 'gpt-4',
@@ -290,8 +290,8 @@ describe('OpenAI Tool Calls integration', () => {
290290
'openai.usage.completion_tokens': 12,
291291
'openai.usage.prompt_tokens': 8,
292292
},
293-
description: 'responses gpt-4 stream-response',
294-
op: 'gen_ai.responses',
293+
description: 'chat gpt-4 stream-response',
294+
op: 'gen_ai.chat',
295295
origin: 'auto.ai.openai',
296296
status: 'ok',
297297
}),

dev-packages/node-integration-tests/suites/tracing/openai/test.ts

Lines changed: 31 additions & 31 deletions
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,8 @@ describe('OpenAI integration', () => {
3838
// Second span - responses API
3939
expect.objectContaining({
4040
data: {
41-
'gen_ai.operation.name': 'responses',
42-
'sentry.op': 'gen_ai.responses',
41+
'gen_ai.operation.name': 'chat',
42+
'sentry.op': 'gen_ai.chat',
4343
'sentry.origin': 'auto.ai.openai',
4444
'gen_ai.system': 'openai',
4545
'gen_ai.request.model': 'gpt-3.5-turbo',
@@ -55,8 +55,8 @@ describe('OpenAI integration', () => {
5555
'openai.usage.completion_tokens': 8,
5656
'openai.usage.prompt_tokens': 5,
5757
},
58-
description: 'responses gpt-3.5-turbo',
59-
op: 'gen_ai.responses',
58+
description: 'chat gpt-3.5-turbo',
59+
op: 'gen_ai.chat',
6060
origin: 'auto.ai.openai',
6161
status: 'ok',
6262
}),
@@ -105,8 +105,8 @@ describe('OpenAI integration', () => {
105105
// Fifth span - responses API streaming
106106
expect.objectContaining({
107107
data: {
108-
'gen_ai.operation.name': 'responses',
109-
'sentry.op': 'gen_ai.responses',
108+
'gen_ai.operation.name': 'chat',
109+
'sentry.op': 'gen_ai.chat',
110110
'sentry.origin': 'auto.ai.openai',
111111
'gen_ai.system': 'openai',
112112
'gen_ai.request.model': 'gpt-4',
@@ -124,8 +124,8 @@ describe('OpenAI integration', () => {
124124
'openai.usage.completion_tokens': 10,
125125
'openai.usage.prompt_tokens': 6,
126126
},
127-
description: 'responses gpt-4 stream-response',
128-
op: 'gen_ai.responses',
127+
description: 'chat gpt-4 stream-response',
128+
op: 'gen_ai.chat',
129129
origin: 'auto.ai.openai',
130130
status: 'ok',
131131
}),
@@ -182,8 +182,8 @@ describe('OpenAI integration', () => {
182182
// Second span - responses API with PII
183183
expect.objectContaining({
184184
data: {
185-
'gen_ai.operation.name': 'responses',
186-
'sentry.op': 'gen_ai.responses',
185+
'gen_ai.operation.name': 'chat',
186+
'sentry.op': 'gen_ai.chat',
187187
'sentry.origin': 'auto.ai.openai',
188188
'gen_ai.system': 'openai',
189189
'gen_ai.request.model': 'gpt-3.5-turbo',
@@ -201,8 +201,8 @@ describe('OpenAI integration', () => {
201201
'openai.usage.completion_tokens': 8,
202202
'openai.usage.prompt_tokens': 5,
203203
},
204-
description: 'responses gpt-3.5-turbo',
205-
op: 'gen_ai.responses',
204+
description: 'chat gpt-3.5-turbo',
205+
op: 'gen_ai.chat',
206206
origin: 'auto.ai.openai',
207207
status: 'ok',
208208
}),
@@ -256,8 +256,8 @@ describe('OpenAI integration', () => {
256256
// Fifth span - responses API streaming with PII
257257
expect.objectContaining({
258258
data: expect.objectContaining({
259-
'gen_ai.operation.name': 'responses',
260-
'sentry.op': 'gen_ai.responses',
259+
'gen_ai.operation.name': 'chat',
260+
'sentry.op': 'gen_ai.chat',
261261
'sentry.origin': 'auto.ai.openai',
262262
'gen_ai.system': 'openai',
263263
'gen_ai.request.model': 'gpt-4',
@@ -277,8 +277,8 @@ describe('OpenAI integration', () => {
277277
'openai.usage.completion_tokens': 10,
278278
'openai.usage.prompt_tokens': 6,
279279
}),
280-
description: 'responses gpt-4 stream-response',
281-
op: 'gen_ai.responses',
280+
description: 'chat gpt-4 stream-response',
281+
op: 'gen_ai.chat',
282282
origin: 'auto.ai.openai',
283283
status: 'ok',
284284
}),
@@ -639,16 +639,16 @@ describe('OpenAI integration', () => {
639639
spans: expect.arrayContaining([
640640
expect.objectContaining({
641641
data: expect.objectContaining({
642-
'gen_ai.operation.name': 'responses',
643-
'sentry.op': 'gen_ai.responses',
642+
'gen_ai.operation.name': 'chat',
643+
'sentry.op': 'gen_ai.chat',
644644
'sentry.origin': 'auto.ai.openai',
645645
'gen_ai.system': 'openai',
646646
'gen_ai.request.model': 'gpt-3.5-turbo',
647647
// Messages should be present and should include truncated string input (contains only As)
648648
'gen_ai.request.messages': expect.stringMatching(/^A+$/),
649649
}),
650-
description: 'responses gpt-3.5-turbo',
651-
op: 'gen_ai.responses',
650+
description: 'chat gpt-3.5-turbo',
651+
op: 'gen_ai.chat',
652652
origin: 'auto.ai.openai',
653653
status: 'ok',
654654
}),
@@ -668,30 +668,30 @@ describe('OpenAI integration', () => {
668668
// First span - conversations.create returns conversation object with id
669669
expect.objectContaining({
670670
data: expect.objectContaining({
671-
'gen_ai.operation.name': 'conversations',
672-
'sentry.op': 'gen_ai.conversations',
671+
'gen_ai.operation.name': 'chat',
672+
'sentry.op': 'gen_ai.chat',
673673
'sentry.origin': 'auto.ai.openai',
674674
'gen_ai.system': 'openai',
675675
// The conversation ID should be captured from the response
676676
'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
677677
}),
678-
description: 'conversations unknown',
679-
op: 'gen_ai.conversations',
678+
description: 'chat unknown',
679+
op: 'gen_ai.chat',
680680
origin: 'auto.ai.openai',
681681
status: 'ok',
682682
}),
683683
// Second span - responses.create with conversation parameter
684684
expect.objectContaining({
685685
data: expect.objectContaining({
686-
'gen_ai.operation.name': 'responses',
687-
'sentry.op': 'gen_ai.responses',
686+
'gen_ai.operation.name': 'chat',
687+
'sentry.op': 'gen_ai.chat',
688688
'sentry.origin': 'auto.ai.openai',
689689
'gen_ai.system': 'openai',
690690
'gen_ai.request.model': 'gpt-4',
691691
// The conversation ID should be captured from the request
692692
'gen_ai.conversation.id': 'conv_689667905b048191b4740501625afd940c7533ace33a2dab',
693693
}),
694-
op: 'gen_ai.responses',
694+
op: 'gen_ai.chat',
695695
origin: 'auto.ai.openai',
696696
status: 'ok',
697697
}),
@@ -700,22 +700,22 @@ describe('OpenAI integration', () => {
700700
data: expect.not.objectContaining({
701701
'gen_ai.conversation.id': expect.anything(),
702702
}),
703-
op: 'gen_ai.responses',
703+
op: 'gen_ai.chat',
704704
origin: 'auto.ai.openai',
705705
status: 'ok',
706706
}),
707707
// Fourth span - responses.create with previous_response_id (chaining)
708708
expect.objectContaining({
709709
data: expect.objectContaining({
710-
'gen_ai.operation.name': 'responses',
711-
'sentry.op': 'gen_ai.responses',
710+
'gen_ai.operation.name': 'chat',
711+
'sentry.op': 'gen_ai.chat',
712712
'sentry.origin': 'auto.ai.openai',
713713
'gen_ai.system': 'openai',
714714
'gen_ai.request.model': 'gpt-4',
715715
// The previous_response_id should be captured as conversation.id
716716
'gen_ai.conversation.id': 'resp_mock_conv_123',
717717
}),
718-
op: 'gen_ai.responses',
718+
op: 'gen_ai.chat',
719719
origin: 'auto.ai.openai',
720720
status: 'ok',
721721
}),

dev-packages/node-integration-tests/suites/tracing/openai/v6/test.ts

Lines changed: 16 additions & 16 deletions
Original file line numberDiff line numberDiff line change
@@ -38,8 +38,8 @@ describe('OpenAI integration (V6)', () => {
3838
// Second span - responses API
3939
expect.objectContaining({
4040
data: {
41-
'gen_ai.operation.name': 'responses',
42-
'sentry.op': 'gen_ai.responses',
41+
'gen_ai.operation.name': 'chat',
42+
'sentry.op': 'gen_ai.chat',
4343
'sentry.origin': 'auto.ai.openai',
4444
'gen_ai.system': 'openai',
4545
'gen_ai.request.model': 'gpt-3.5-turbo',
@@ -55,8 +55,8 @@ describe('OpenAI integration (V6)', () => {
5555
'openai.usage.completion_tokens': 8,
5656
'openai.usage.prompt_tokens': 5,
5757
},
58-
description: 'responses gpt-3.5-turbo',
59-
op: 'gen_ai.responses',
58+
description: 'chat gpt-3.5-turbo',
59+
op: 'gen_ai.chat',
6060
origin: 'auto.ai.openai',
6161
status: 'ok',
6262
}),
@@ -105,8 +105,8 @@ describe('OpenAI integration (V6)', () => {
105105
// Fifth span - responses API streaming
106106
expect.objectContaining({
107107
data: {
108-
'gen_ai.operation.name': 'responses',
109-
'sentry.op': 'gen_ai.responses',
108+
'gen_ai.operation.name': 'chat',
109+
'sentry.op': 'gen_ai.chat',
110110
'sentry.origin': 'auto.ai.openai',
111111
'gen_ai.system': 'openai',
112112
'gen_ai.request.model': 'gpt-4',
@@ -124,8 +124,8 @@ describe('OpenAI integration (V6)', () => {
124124
'openai.usage.completion_tokens': 10,
125125
'openai.usage.prompt_tokens': 6,
126126
},
127-
description: 'responses gpt-4 stream-response',
128-
op: 'gen_ai.responses',
127+
description: 'chat gpt-4 stream-response',
128+
op: 'gen_ai.chat',
129129
origin: 'auto.ai.openai',
130130
status: 'ok',
131131
}),
@@ -182,8 +182,8 @@ describe('OpenAI integration (V6)', () => {
182182
// Second span - responses API with PII
183183
expect.objectContaining({
184184
data: {
185-
'gen_ai.operation.name': 'responses',
186-
'sentry.op': 'gen_ai.responses',
185+
'gen_ai.operation.name': 'chat',
186+
'sentry.op': 'gen_ai.chat',
187187
'sentry.origin': 'auto.ai.openai',
188188
'gen_ai.system': 'openai',
189189
'gen_ai.request.model': 'gpt-3.5-turbo',
@@ -201,8 +201,8 @@ describe('OpenAI integration (V6)', () => {
201201
'openai.usage.completion_tokens': 8,
202202
'openai.usage.prompt_tokens': 5,
203203
},
204-
description: 'responses gpt-3.5-turbo',
205-
op: 'gen_ai.responses',
204+
description: 'chat gpt-3.5-turbo',
205+
op: 'gen_ai.chat',
206206
origin: 'auto.ai.openai',
207207
status: 'ok',
208208
}),
@@ -256,8 +256,8 @@ describe('OpenAI integration (V6)', () => {
256256
// Fifth span - responses API streaming with PII
257257
expect.objectContaining({
258258
data: expect.objectContaining({
259-
'gen_ai.operation.name': 'responses',
260-
'sentry.op': 'gen_ai.responses',
259+
'gen_ai.operation.name': 'chat',
260+
'sentry.op': 'gen_ai.chat',
261261
'sentry.origin': 'auto.ai.openai',
262262
'gen_ai.system': 'openai',
263263
'gen_ai.request.model': 'gpt-4',
@@ -277,8 +277,8 @@ describe('OpenAI integration (V6)', () => {
277277
'openai.usage.completion_tokens': 10,
278278
'openai.usage.prompt_tokens': 6,
279279
}),
280-
description: 'responses gpt-4 stream-response',
281-
op: 'gen_ai.responses',
280+
description: 'chat gpt-4 stream-response',
281+
op: 'gen_ai.chat',
282282
origin: 'auto.ai.openai',
283283
status: 'ok',
284284
}),

0 commit comments

Comments (0)