Skip to content

Commit 6a6acc4

Browse files
committed
fix vercel ai op name, and fix tests
1 parent 79c9f4c commit 6a6acc4

7 files changed

Lines changed: 103 additions & 64 deletions

File tree

dev-packages/node-integration-tests/suites/tracing/google-genai/test.ts

Lines changed: 56 additions & 56 deletions
Original file line numberDiff line numberDiff line change
@@ -46,8 +46,8 @@ describe('Google GenAI integration', () => {
4646
// Third span - models.generateContent
4747
expect.objectContaining({
4848
data: {
49-
'gen_ai.operation.name': 'models',
50-
'sentry.op': 'gen_ai.models',
49+
'gen_ai.operation.name': 'generate_content',
50+
'sentry.op': 'gen_ai.generate_content',
5151
'sentry.origin': 'auto.ai.google_genai',
5252
'gen_ai.system': 'google_genai',
5353
'gen_ai.request.model': 'gemini-1.5-flash',
@@ -58,22 +58,22 @@ describe('Google GenAI integration', () => {
5858
'gen_ai.usage.output_tokens': 12,
5959
'gen_ai.usage.total_tokens': 20,
6060
},
61-
description: 'models gemini-1.5-flash',
62-
op: 'gen_ai.models',
61+
description: 'generate_content gemini-1.5-flash',
62+
op: 'gen_ai.generate_content',
6363
origin: 'auto.ai.google_genai',
6464
status: 'ok',
6565
}),
6666
// Fourth span - error handling
6767
expect.objectContaining({
6868
data: {
69-
'gen_ai.operation.name': 'models',
70-
'sentry.op': 'gen_ai.models',
69+
'gen_ai.operation.name': 'generate_content',
70+
'sentry.op': 'gen_ai.generate_content',
7171
'sentry.origin': 'auto.ai.google_genai',
7272
'gen_ai.system': 'google_genai',
7373
'gen_ai.request.model': 'error-model',
7474
},
75-
description: 'models error-model',
76-
op: 'gen_ai.models',
75+
description: 'generate_content error-model',
76+
op: 'gen_ai.generate_content',
7777
origin: 'auto.ai.google_genai',
7878
status: 'internal_error',
7979
}),
@@ -123,8 +123,8 @@ describe('Google GenAI integration', () => {
123123
// Third span - models.generateContent with PII
124124
expect.objectContaining({
125125
data: expect.objectContaining({
126-
'gen_ai.operation.name': 'models',
127-
'sentry.op': 'gen_ai.models',
126+
'gen_ai.operation.name': 'generate_content',
127+
'sentry.op': 'gen_ai.generate_content',
128128
'sentry.origin': 'auto.ai.google_genai',
129129
'gen_ai.system': 'google_genai',
130130
'gen_ai.request.model': 'gemini-1.5-flash',
@@ -137,23 +137,23 @@ describe('Google GenAI integration', () => {
137137
'gen_ai.usage.output_tokens': 12,
138138
'gen_ai.usage.total_tokens': 20,
139139
}),
140-
description: 'models gemini-1.5-flash',
141-
op: 'gen_ai.models',
140+
description: 'generate_content gemini-1.5-flash',
141+
op: 'gen_ai.generate_content',
142142
origin: 'auto.ai.google_genai',
143143
status: 'ok',
144144
}),
145145
// Fourth span - error handling with PII
146146
expect.objectContaining({
147147
data: expect.objectContaining({
148-
'gen_ai.operation.name': 'models',
149-
'sentry.op': 'gen_ai.models',
148+
'gen_ai.operation.name': 'generate_content',
149+
'sentry.op': 'gen_ai.generate_content',
150150
'sentry.origin': 'auto.ai.google_genai',
151151
'gen_ai.system': 'google_genai',
152152
'gen_ai.request.model': 'error-model',
153153
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
154154
}),
155-
description: 'models error-model',
156-
op: 'gen_ai.models',
155+
description: 'generate_content error-model',
156+
op: 'gen_ai.generate_content',
157157
origin: 'auto.ai.google_genai',
158158
status: 'internal_error',
159159
}),
@@ -213,8 +213,8 @@ describe('Google GenAI integration', () => {
213213
// Non-streaming with tools
214214
expect.objectContaining({
215215
data: expect.objectContaining({
216-
'gen_ai.operation.name': 'models',
217-
'sentry.op': 'gen_ai.models',
216+
'gen_ai.operation.name': 'generate_content',
217+
'sentry.op': 'gen_ai.generate_content',
218218
'sentry.origin': 'auto.ai.google_genai',
219219
'gen_ai.system': 'google_genai',
220220
'gen_ai.request.model': 'gemini-2.0-flash-001',
@@ -226,16 +226,16 @@ describe('Google GenAI integration', () => {
226226
'gen_ai.usage.output_tokens': 8,
227227
'gen_ai.usage.total_tokens': 23,
228228
}),
229-
description: 'models gemini-2.0-flash-001',
230-
op: 'gen_ai.models',
229+
description: 'generate_content gemini-2.0-flash-001',
230+
op: 'gen_ai.generate_content',
231231
origin: 'auto.ai.google_genai',
232232
status: 'ok',
233233
}),
234234
// Streaming with tools
235235
expect.objectContaining({
236236
data: expect.objectContaining({
237-
'gen_ai.operation.name': 'models',
238-
'sentry.op': 'gen_ai.models',
237+
'gen_ai.operation.name': 'generate_content',
238+
'sentry.op': 'gen_ai.generate_content',
239239
'sentry.origin': 'auto.ai.google_genai',
240240
'gen_ai.system': 'google_genai',
241241
'gen_ai.request.model': 'gemini-2.0-flash-001',
@@ -250,16 +250,16 @@ describe('Google GenAI integration', () => {
250250
'gen_ai.usage.output_tokens': 10,
251251
'gen_ai.usage.total_tokens': 22,
252252
}),
253-
description: 'models gemini-2.0-flash-001 stream-response',
254-
op: 'gen_ai.models',
253+
description: 'generate_content gemini-2.0-flash-001 stream-response',
254+
op: 'gen_ai.generate_content',
255255
origin: 'auto.ai.google_genai',
256256
status: 'ok',
257257
}),
258258
// Without tools for comparison
259259
expect.objectContaining({
260260
data: expect.objectContaining({
261-
'gen_ai.operation.name': 'models',
262-
'sentry.op': 'gen_ai.models',
261+
'gen_ai.operation.name': 'generate_content',
262+
'sentry.op': 'gen_ai.generate_content',
263263
'sentry.origin': 'auto.ai.google_genai',
264264
'gen_ai.system': 'google_genai',
265265
'gen_ai.request.model': 'gemini-2.0-flash-001',
@@ -269,8 +269,8 @@ describe('Google GenAI integration', () => {
269269
'gen_ai.usage.output_tokens': 12,
270270
'gen_ai.usage.total_tokens': 20,
271271
}),
272-
description: 'models gemini-2.0-flash-001',
273-
op: 'gen_ai.models',
272+
description: 'generate_content gemini-2.0-flash-001',
273+
op: 'gen_ai.generate_content',
274274
origin: 'auto.ai.google_genai',
275275
status: 'ok',
276276
}),
@@ -289,8 +289,8 @@ describe('Google GenAI integration', () => {
289289
// First span - models.generateContentStream (streaming)
290290
expect.objectContaining({
291291
data: expect.objectContaining({
292-
'gen_ai.operation.name': 'models',
293-
'sentry.op': 'gen_ai.models',
292+
'gen_ai.operation.name': 'generate_content',
293+
'sentry.op': 'gen_ai.generate_content',
294294
'sentry.origin': 'auto.ai.google_genai',
295295
'gen_ai.system': 'google_genai',
296296
'gen_ai.request.model': 'gemini-1.5-flash',
@@ -305,8 +305,8 @@ describe('Google GenAI integration', () => {
305305
'gen_ai.usage.output_tokens': 12,
306306
'gen_ai.usage.total_tokens': 22,
307307
}),
308-
description: 'models gemini-1.5-flash stream-response',
309-
op: 'gen_ai.models',
308+
description: 'generate_content gemini-1.5-flash stream-response',
309+
op: 'gen_ai.generate_content',
310310
origin: 'auto.ai.google_genai',
311311
status: 'ok',
312312
}),
@@ -347,24 +347,24 @@ describe('Google GenAI integration', () => {
347347
// Fourth span - blocked content streaming
348348
expect.objectContaining({
349349
data: expect.objectContaining({
350-
'gen_ai.operation.name': 'models',
351-
'sentry.op': 'gen_ai.models',
350+
'gen_ai.operation.name': 'generate_content',
351+
'sentry.op': 'gen_ai.generate_content',
352352
'sentry.origin': 'auto.ai.google_genai',
353353
}),
354-
description: 'models blocked-model stream-response',
355-
op: 'gen_ai.models',
354+
description: 'generate_content blocked-model stream-response',
355+
op: 'gen_ai.generate_content',
356356
origin: 'auto.ai.google_genai',
357357
status: 'internal_error',
358358
}),
359359
// Fifth span - error handling for streaming
360360
expect.objectContaining({
361361
data: expect.objectContaining({
362-
'gen_ai.operation.name': 'models',
363-
'sentry.op': 'gen_ai.models',
362+
'gen_ai.operation.name': 'generate_content',
363+
'sentry.op': 'gen_ai.generate_content',
364364
'sentry.origin': 'auto.ai.google_genai',
365365
}),
366-
description: 'models error-model stream-response',
367-
op: 'gen_ai.models',
366+
description: 'generate_content error-model stream-response',
367+
op: 'gen_ai.generate_content',
368368
origin: 'auto.ai.google_genai',
369369
status: 'internal_error',
370370
}),
@@ -377,8 +377,8 @@ describe('Google GenAI integration', () => {
377377
// First span - models.generateContentStream (streaming) with PII
378378
expect.objectContaining({
379379
data: expect.objectContaining({
380-
'gen_ai.operation.name': 'models',
381-
'sentry.op': 'gen_ai.models',
380+
'gen_ai.operation.name': 'generate_content',
381+
'sentry.op': 'gen_ai.generate_content',
382382
'sentry.origin': 'auto.ai.google_genai',
383383
'gen_ai.system': 'google_genai',
384384
'gen_ai.request.model': 'gemini-1.5-flash',
@@ -394,8 +394,8 @@ describe('Google GenAI integration', () => {
394394
'gen_ai.usage.output_tokens': 12,
395395
'gen_ai.usage.total_tokens': 22,
396396
}),
397-
description: 'models gemini-1.5-flash stream-response',
398-
op: 'gen_ai.models',
397+
description: 'generate_content gemini-1.5-flash stream-response',
398+
op: 'gen_ai.generate_content',
399399
origin: 'auto.ai.google_genai',
400400
status: 'ok',
401401
}),
@@ -441,33 +441,33 @@ describe('Google GenAI integration', () => {
441441
// Fourth span - blocked content stream with PII
442442
expect.objectContaining({
443443
data: expect.objectContaining({
444-
'gen_ai.operation.name': 'models',
445-
'sentry.op': 'gen_ai.models',
444+
'gen_ai.operation.name': 'generate_content',
445+
'sentry.op': 'gen_ai.generate_content',
446446
'sentry.origin': 'auto.ai.google_genai',
447447
'gen_ai.system': 'google_genai',
448448
'gen_ai.request.model': 'blocked-model',
449449
'gen_ai.request.temperature': 0.7,
450450
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
451451
'gen_ai.response.streaming': true,
452452
}),
453-
description: 'models blocked-model stream-response',
454-
op: 'gen_ai.models',
453+
description: 'generate_content blocked-model stream-response',
454+
op: 'gen_ai.generate_content',
455455
origin: 'auto.ai.google_genai',
456456
status: 'internal_error',
457457
}),
458458
// Fifth span - error handling for streaming with PII
459459
expect.objectContaining({
460460
data: expect.objectContaining({
461-
'gen_ai.operation.name': 'models',
462-
'sentry.op': 'gen_ai.models',
461+
'gen_ai.operation.name': 'generate_content',
462+
'sentry.op': 'gen_ai.generate_content',
463463
'sentry.origin': 'auto.ai.google_genai',
464464
'gen_ai.system': 'google_genai',
465465
'gen_ai.request.model': 'error-model',
466466
'gen_ai.request.temperature': 0.7,
467467
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
468468
}),
469-
description: 'models error-model stream-response',
470-
op: 'gen_ai.models',
469+
description: 'generate_content error-model stream-response',
470+
op: 'gen_ai.generate_content',
471471
origin: 'auto.ai.google_genai',
472472
status: 'internal_error',
473473
}),
@@ -505,8 +505,8 @@ describe('Google GenAI integration', () => {
505505
// First call: Last message is large and gets truncated (only C's remain, D's are cropped)
506506
expect.objectContaining({
507507
data: expect.objectContaining({
508-
'gen_ai.operation.name': 'models',
509-
'sentry.op': 'gen_ai.models',
508+
'gen_ai.operation.name': 'generate_content',
509+
'sentry.op': 'gen_ai.generate_content',
510510
'sentry.origin': 'auto.ai.google_genai',
511511
'gen_ai.system': 'google_genai',
512512
'gen_ai.request.model': 'gemini-1.5-flash',
@@ -515,8 +515,8 @@ describe('Google GenAI integration', () => {
515515
/^\[\{"role":"user","parts":\[\{"text":"C+"\}\]\}\]$/,
516516
),
517517
}),
518-
description: 'models gemini-1.5-flash',
519-
op: 'gen_ai.models',
518+
description: 'generate_content gemini-1.5-flash',
519+
op: 'gen_ai.generate_content',
520520
origin: 'auto.ai.google_genai',
521521
status: 'ok',
522522
}),

dev-packages/node-integration-tests/suites/tracing/langchain/test.ts

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -285,8 +285,7 @@ describe('LangChain integration', () => {
285285
// First call: Direct Anthropic call made BEFORE LangChain import
286286
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
287287
const firstAnthropicSpan = spans.find(
288-
span =>
289-
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
288+
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
290289
);
291290

292291
// Second call: LangChain call
@@ -299,8 +298,7 @@ describe('LangChain integration', () => {
299298
// This should NOT have Anthropic instrumentation (skip works correctly)
300299
// Count how many Anthropic spans we have - should be exactly 1
301300
const anthropicSpans = spans.filter(
302-
span =>
303-
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
301+
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
304302
);
305303

306304
// Verify the edge case limitation:

dev-packages/node-integration-tests/suites/tracing/langchain/v1/test.ts

Lines changed: 2 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -335,8 +335,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
335335
// First call: Direct Anthropic call made BEFORE LangChain import
336336
// This should have Anthropic instrumentation (origin: 'auto.ai.anthropic')
337337
const firstAnthropicSpan = spans.find(
338-
span =>
339-
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
338+
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
340339
);
341340

342341
// Second call: LangChain call
@@ -349,8 +348,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
349348
// This should NOT have Anthropic instrumentation (skip works correctly)
350349
// Count how many Anthropic spans we have - should be exactly 1
351350
const anthropicSpans = spans.filter(
352-
span =>
353-
span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
351+
span => span.description === 'chat claude-3-5-sonnet-20241022' && span.origin === 'auto.ai.anthropic',
354352
);
355353

356354
// Verify the edge case limitation:

0 commit comments

Comments (0)