Skip to content

Commit a17a3cf

Browse files
committed
move away from gen_ai.request.messages to gen_ai.input.messages
1 parent 80b4705 commit a17a3cf

21 files changed

Lines changed: 154 additions & 154 deletions

File tree

dev-packages/cloudflare-integration-tests/suites/tracing/langgraph/test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@ it('traces langgraph compile and invoke operations', async ({ signal }) => {
3737
'sentry.origin': 'auto.ai.langgraph',
3838
'gen_ai.agent.name': 'weather_assistant',
3939
'gen_ai.pipeline.name': 'weather_assistant',
40-
'gen_ai.request.messages': '[{"role":"user","content":"What is the weather in SF?"}]',
40+
'gen_ai.input.messages': '[{"role":"user","content":"What is the weather in SF?"}]',
4141
'gen_ai.response.model': 'mock-model',
4242
'gen_ai.usage.input_tokens': 20,
4343
'gen_ai.usage.output_tokens': 10,

dev-packages/node-integration-tests/suites/tracing/anthropic/test.ts

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -86,7 +86,7 @@ describe('Anthropic integration', () => {
8686
data: expect.objectContaining({
8787
'gen_ai.operation.name': 'messages',
8888
'gen_ai.request.max_tokens': 100,
89-
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
89+
'gen_ai.input.messages': '[{"role":"user","content":"What is the capital of France?"}]',
9090
'gen_ai.request.model': 'claude-3-haiku-20240307',
9191
'gen_ai.request.temperature': 0.7,
9292
'gen_ai.response.id': 'msg_mock123',
@@ -126,7 +126,7 @@ describe('Anthropic integration', () => {
126126
expect.objectContaining({
127127
data: expect.objectContaining({
128128
'gen_ai.operation.name': 'messages',
129-
'gen_ai.request.messages': '[{"role":"user","content":"This will fail"}]',
129+
'gen_ai.input.messages': '[{"role":"user","content":"This will fail"}]',
130130
'gen_ai.request.model': 'error-model',
131131
'gen_ai.system': 'anthropic',
132132
'sentry.op': 'gen_ai.messages',
@@ -159,7 +159,7 @@ describe('Anthropic integration', () => {
159159
expect.objectContaining({
160160
data: expect.objectContaining({
161161
'gen_ai.operation.name': 'messages',
162-
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
162+
'gen_ai.input.messages': '[{"role":"user","content":"What is the capital of France?"}]',
163163
'gen_ai.request.model': 'claude-3-haiku-20240307',
164164
'gen_ai.response.text': '15',
165165
'gen_ai.system': 'anthropic',
@@ -229,7 +229,7 @@ describe('Anthropic integration', () => {
229229
expect.objectContaining({
230230
data: expect.objectContaining({
231231
'gen_ai.operation.name': 'messages',
232-
'gen_ai.request.messages': '[{"role":"user","content":"What is the capital of France?"}]',
232+
'gen_ai.input.messages': '[{"role":"user","content":"What is the capital of France?"}]',
233233
'gen_ai.request.model': 'claude-3-haiku-20240307',
234234
'gen_ai.request.stream': true,
235235
'gen_ai.response.id': 'msg_stream123',
@@ -287,15 +287,15 @@ describe('Anthropic integration', () => {
287287
// Check that custom options are respected
288288
expect.objectContaining({
289289
data: expect.objectContaining({
290-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
290+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
291291
'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true
292292
}),
293293
}),
294294
// Check token counting with options
295295
expect.objectContaining({
296296
data: expect.objectContaining({
297297
'gen_ai.operation.name': 'messages',
298-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
298+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
299299
'gen_ai.response.text': '15', // Present because recordOutputs=true is set in options
300300
}),
301301
op: 'gen_ai.messages',
@@ -646,7 +646,7 @@ describe('Anthropic integration', () => {
646646
'gen_ai.system': 'anthropic',
647647
'gen_ai.request.model': 'claude-3-haiku-20240307',
648648
// Messages should be present (truncation happened) and should be a JSON array
649-
'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
649+
'gen_ai.input.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
650650
}),
651651
description: 'messages claude-3-haiku-20240307',
652652
op: 'gen_ai.messages',
@@ -662,7 +662,7 @@ describe('Anthropic integration', () => {
662662
'gen_ai.system': 'anthropic',
663663
'gen_ai.request.model': 'claude-3-haiku-20240307',
664664
// Small message should be kept intact
665-
'gen_ai.request.messages': JSON.stringify([
665+
'gen_ai.input.messages': JSON.stringify([
666666
{ role: 'user', content: 'This is a small message that fits within the limit' },
667667
]),
668668
}),
@@ -696,7 +696,7 @@ describe('Anthropic integration', () => {
696696
'gen_ai.system': 'anthropic',
697697
'gen_ai.request.model': 'claude-3-haiku-20240307',
698698
// Only the last message (with filtered media) should be kept
699-
'gen_ai.request.messages': JSON.stringify([
699+
'gen_ai.input.messages': JSON.stringify([
700700
{
701701
role: 'user',
702702
content: [

dev-packages/node-integration-tests/suites/tracing/google-genai/test.ts

Lines changed: 14 additions & 14 deletions
Original file line numberDiff line numberDiff line change
@@ -94,7 +94,7 @@ describe('Google GenAI integration', () => {
9494
'gen_ai.request.temperature': 0.8,
9595
'gen_ai.request.top_p': 0.9,
9696
'gen_ai.request.max_tokens': 150,
97-
'gen_ai.request.messages': '[{"role":"user","parts":[{"text":"Hello, how are you?"}]}]',
97+
'gen_ai.input.messages': '[{"role":"user","parts":[{"text":"Hello, how are you?"}]}]',
9898
}),
9999
description: 'chat gemini-1.5-pro create',
100100
op: 'gen_ai.chat',
@@ -109,7 +109,7 @@ describe('Google GenAI integration', () => {
109109
'sentry.origin': 'auto.ai.google_genai',
110110
'gen_ai.system': 'google_genai',
111111
'gen_ai.request.model': 'gemini-1.5-pro',
112-
'gen_ai.request.messages': expect.any(String), // Should include message when recordInputs: true
112+
'gen_ai.input.messages': expect.any(String), // Should include message when recordInputs: true
113113
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
114114
'gen_ai.usage.input_tokens': 8,
115115
'gen_ai.usage.output_tokens': 12,
@@ -131,7 +131,7 @@ describe('Google GenAI integration', () => {
131131
'gen_ai.request.temperature': 0.7,
132132
'gen_ai.request.top_p': 0.9,
133133
'gen_ai.request.max_tokens': 100,
134-
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
134+
'gen_ai.input.messages': expect.any(String), // Should include contents when recordInputs: true
135135
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
136136
'gen_ai.usage.input_tokens': 8,
137137
'gen_ai.usage.output_tokens': 12,
@@ -150,7 +150,7 @@ describe('Google GenAI integration', () => {
150150
'sentry.origin': 'auto.ai.google_genai',
151151
'gen_ai.system': 'google_genai',
152152
'gen_ai.request.model': 'error-model',
153-
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
153+
'gen_ai.input.messages': expect.any(String), // Should include contents when recordInputs: true
154154
}),
155155
description: 'models error-model',
156156
op: 'gen_ai.models',
@@ -166,7 +166,7 @@ describe('Google GenAI integration', () => {
166166
// Check that custom options are respected
167167
expect.objectContaining({
168168
data: expect.objectContaining({
169-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
169+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
170170
'gen_ai.response.text': expect.any(String), // Should include response text when recordOutputs: true
171171
}),
172172
description: expect.not.stringContaining('stream-response'), // Non-streaming span
@@ -219,7 +219,7 @@ describe('Google GenAI integration', () => {
219219
'gen_ai.system': 'google_genai',
220220
'gen_ai.request.model': 'gemini-2.0-flash-001',
221221
'gen_ai.request.available_tools': EXPECTED_AVAILABLE_TOOLS_JSON,
222-
'gen_ai.request.messages': expect.any(String), // Should include contents
222+
'gen_ai.input.messages': expect.any(String), // Should include contents
223223
'gen_ai.response.text': expect.any(String), // Should include response text
224224
'gen_ai.response.tool_calls': expect.any(String), // Should include tool calls
225225
'gen_ai.usage.input_tokens': 15,
@@ -240,7 +240,7 @@ describe('Google GenAI integration', () => {
240240
'gen_ai.system': 'google_genai',
241241
'gen_ai.request.model': 'gemini-2.0-flash-001',
242242
'gen_ai.request.available_tools': EXPECTED_AVAILABLE_TOOLS_JSON,
243-
'gen_ai.request.messages': expect.any(String), // Should include contents
243+
'gen_ai.input.messages': expect.any(String), // Should include contents
244244
'gen_ai.response.streaming': true,
245245
'gen_ai.response.text': expect.any(String), // Should include response text
246246
'gen_ai.response.tool_calls': expect.any(String), // Should include tool calls
@@ -263,7 +263,7 @@ describe('Google GenAI integration', () => {
263263
'sentry.origin': 'auto.ai.google_genai',
264264
'gen_ai.system': 'google_genai',
265265
'gen_ai.request.model': 'gemini-2.0-flash-001',
266-
'gen_ai.request.messages': expect.any(String), // Should include contents
266+
'gen_ai.input.messages': expect.any(String), // Should include contents
267267
'gen_ai.response.text': expect.any(String), // Should include response text
268268
'gen_ai.usage.input_tokens': 8,
269269
'gen_ai.usage.output_tokens': 12,
@@ -385,7 +385,7 @@ describe('Google GenAI integration', () => {
385385
'gen_ai.request.temperature': 0.7,
386386
'gen_ai.request.top_p': 0.9,
387387
'gen_ai.request.max_tokens': 100,
388-
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
388+
'gen_ai.input.messages': expect.any(String), // Should include contents when recordInputs: true
389389
'gen_ai.response.streaming': true,
390390
'gen_ai.response.id': 'mock-response-streaming-id',
391391
'gen_ai.response.model': 'gemini-1.5-pro',
@@ -424,7 +424,7 @@ describe('Google GenAI integration', () => {
424424
'sentry.origin': 'auto.ai.google_genai',
425425
'gen_ai.system': 'google_genai',
426426
'gen_ai.request.model': 'gemini-1.5-pro',
427-
'gen_ai.request.messages': expect.any(String), // Should include message when recordInputs: true
427+
'gen_ai.input.messages': expect.any(String), // Should include message when recordInputs: true
428428
'gen_ai.response.streaming': true,
429429
'gen_ai.response.id': 'mock-response-streaming-id',
430430
'gen_ai.response.model': 'gemini-1.5-pro',
@@ -447,7 +447,7 @@ describe('Google GenAI integration', () => {
447447
'gen_ai.system': 'google_genai',
448448
'gen_ai.request.model': 'blocked-model',
449449
'gen_ai.request.temperature': 0.7,
450-
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
450+
'gen_ai.input.messages': expect.any(String), // Should include contents when recordInputs: true
451451
'gen_ai.response.streaming': true,
452452
}),
453453
description: 'models blocked-model stream-response',
@@ -464,7 +464,7 @@ describe('Google GenAI integration', () => {
464464
'gen_ai.system': 'google_genai',
465465
'gen_ai.request.model': 'error-model',
466466
'gen_ai.request.temperature': 0.7,
467-
'gen_ai.request.messages': expect.any(String), // Should include contents when recordInputs: true
467+
'gen_ai.input.messages': expect.any(String), // Should include contents when recordInputs: true
468468
}),
469469
description: 'models error-model stream-response',
470470
op: 'gen_ai.models',
@@ -511,7 +511,7 @@ describe('Google GenAI integration', () => {
511511
'gen_ai.system': 'google_genai',
512512
'gen_ai.request.model': 'gemini-1.5-flash',
513513
// Messages should be present (truncation happened) and should be a JSON array with parts
514-
'gen_ai.request.messages': expect.stringMatching(
514+
'gen_ai.input.messages': expect.stringMatching(
515515
/^\[\{"role":"user","parts":\[\{"text":"C+"\}\]\}\]$/,
516516
),
517517
}),
@@ -529,7 +529,7 @@ describe('Google GenAI integration', () => {
529529
'gen_ai.system': 'google_genai',
530530
'gen_ai.request.model': 'gemini-1.5-flash',
531531
// Small message should be kept intact
532-
'gen_ai.request.messages': JSON.stringify([
532+
'gen_ai.input.messages': JSON.stringify([
533533
{
534534
role: 'user',
535535
parts: [{ text: 'This is a small message that fits within the limit' }],

dev-packages/node-integration-tests/suites/tracing/langchain/test.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -84,7 +84,7 @@ describe('LangChain integration', () => {
8484
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
8585
'gen_ai.request.temperature': 0.7,
8686
'gen_ai.request.max_tokens': 100,
87-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
87+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
8888
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
8989
'gen_ai.response.id': expect.any(String),
9090
'gen_ai.response.model': expect.any(String),
@@ -109,7 +109,7 @@ describe('LangChain integration', () => {
109109
'gen_ai.request.temperature': 0.9,
110110
'gen_ai.request.top_p': 0.95,
111111
'gen_ai.request.max_tokens': 200,
112-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
112+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
113113
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
114114
'gen_ai.response.id': expect.any(String),
115115
'gen_ai.response.model': expect.any(String),
@@ -131,7 +131,7 @@ describe('LangChain integration', () => {
131131
'sentry.origin': 'auto.ai.langchain',
132132
'gen_ai.system': 'anthropic',
133133
'gen_ai.request.model': 'error-model',
134-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
134+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
135135
}),
136136
description: 'chat error-model',
137137
op: 'gen_ai.chat',
@@ -207,7 +207,7 @@ describe('LangChain integration', () => {
207207
'gen_ai.system': 'anthropic',
208208
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
209209
// Messages should be present and should include truncated string input (contains only Cs)
210-
'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
210+
'gen_ai.input.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
211211
}),
212212
description: 'chat claude-3-5-sonnet-20241022',
213213
op: 'gen_ai.chat',
@@ -223,7 +223,7 @@ describe('LangChain integration', () => {
223223
'gen_ai.system': 'anthropic',
224224
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
225225
// Messages should be present (truncation happened) and should be a JSON array of a single index (contains only Cs)
226-
'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
226+
'gen_ai.input.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
227227
}),
228228
description: 'chat claude-3-5-sonnet-20241022',
229229
op: 'gen_ai.chat',
@@ -239,7 +239,7 @@ describe('LangChain integration', () => {
239239
'gen_ai.system': 'anthropic',
240240
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
241241
// Small message should be kept intact
242-
'gen_ai.request.messages': JSON.stringify([
242+
'gen_ai.input.messages': JSON.stringify([
243243
{ role: 'user', content: 'This is a small message that fits within the limit' },
244244
]),
245245
}),

dev-packages/node-integration-tests/suites/tracing/langchain/v1/test.ts

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -87,7 +87,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
8787
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
8888
'gen_ai.request.temperature': 0.7,
8989
'gen_ai.request.max_tokens': 100,
90-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
90+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
9191
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
9292
'gen_ai.response.id': expect.any(String),
9393
'gen_ai.response.model': expect.any(String),
@@ -112,7 +112,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
112112
'gen_ai.request.temperature': 0.9,
113113
'gen_ai.request.top_p': 0.95,
114114
'gen_ai.request.max_tokens': 200,
115-
'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
115+
'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
116116
'gen_ai.response.text': expect.any(String), // Should include response when recordOutputs: true
117117
'gen_ai.response.id': expect.any(String),
118118
'gen_ai.response.model': expect.any(String),
@@ -134,7 +134,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
134134
// 'sentry.origin': 'auto.ai.langchain',
135135
// 'gen_ai.system': 'anthropic',
136136
// 'gen_ai.request.model': 'error-model',
137-
// 'gen_ai.request.messages': expect.any(String), // Should include messages when recordInputs: true
137+
// 'gen_ai.input.messages': expect.any(String), // Should include messages when recordInputs: true
138138
// }),
139139
// description: 'chat error-model',
140140
// op: 'gen_ai.chat',
@@ -250,7 +250,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
250250
'gen_ai.system': 'anthropic',
251251
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
252252
// Messages should be present and should include truncated string input (contains only Cs)
253-
'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
253+
'gen_ai.input.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
254254
}),
255255
description: 'chat claude-3-5-sonnet-20241022',
256256
op: 'gen_ai.chat',
@@ -266,7 +266,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
266266
'gen_ai.system': 'anthropic',
267267
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
268268
// Messages should be present (truncation happened) and should be a JSON array of a single index (contains only Cs)
269-
'gen_ai.request.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
269+
'gen_ai.input.messages': expect.stringMatching(/^\[\{"role":"user","content":"C+"\}\]$/),
270270
}),
271271
description: 'chat claude-3-5-sonnet-20241022',
272272
op: 'gen_ai.chat',
@@ -282,7 +282,7 @@ conditionalTest({ min: 20 })('LangChain integration (v1)', () => {
282282
'gen_ai.system': 'anthropic',
283283
'gen_ai.request.model': 'claude-3-5-sonnet-20241022',
284284
// Small message should be kept intact
285-
'gen_ai.request.messages': JSON.stringify([
285+
'gen_ai.input.messages': JSON.stringify([
286286
{ role: 'user', content: 'This is a small message that fits within the limit' },
287287
]),
288288
}),

0 commit comments

Comments (0)