Skip to content

Commit 62d2a98

Browse files
test(langchain): Consolidate span origin assertion (#5723)
1 parent 9903b26 commit 62d2a98

File tree

1 file changed

+8
-116
lines changed

1 file changed

+8
-116
lines changed

tests/integrations/langchain/test_langchain.py

Lines changed: 8 additions & 116 deletions
Original file line number | Diff line number | Diff line change
@@ -214,11 +214,19 @@ def test_langchain_agent(
214214

215215
tx = events[0]
216216
assert tx["type"] == "transaction"
217+
assert tx["contexts"]["trace"]["origin"] == "manual"
218+
219+
invoke_agent_span = next(x for x in tx["spans"] if x["op"] == "gen_ai.invoke_agent")
217220
chat_spans = list(x for x in tx["spans"] if x["op"] == "gen_ai.chat")
218221
tool_exec_span = next(x for x in tx["spans"] if x["op"] == "gen_ai.execute_tool")
219222

220223
assert len(chat_spans) == 2
221224

225+
assert invoke_agent_span["origin"] == "auto.ai.langchain"
226+
assert chat_spans[0]["origin"] == "auto.ai.langchain"
227+
assert chat_spans[1]["origin"] == "auto.ai.langchain"
228+
assert tool_exec_span["origin"] == "auto.ai.langchain"
229+
222230
# We can't guarantee anything about the "shape" of the langchain execution graph
223231
assert len(list(x for x in tx["spans"] if x["op"] == "gen_ai.chat")) > 0
224232

@@ -392,122 +400,6 @@ def test_span_status_error(sentry_init, capture_events):
392400
assert transaction["contexts"]["trace"]["status"] == "internal_error"
393401

394402

395-
def test_span_origin(sentry_init, capture_events):
396-
sentry_init(
397-
integrations=[LangchainIntegration()],
398-
traces_sample_rate=1.0,
399-
)
400-
events = capture_events()
401-
402-
prompt = ChatPromptTemplate.from_messages(
403-
[
404-
(
405-
"system",
406-
"You are very powerful assistant, but don't know current events",
407-
),
408-
("user", "{input}"),
409-
MessagesPlaceholder(variable_name="agent_scratchpad"),
410-
]
411-
)
412-
global stream_result_mock
413-
stream_result_mock = Mock(
414-
side_effect=[
415-
[
416-
ChatGenerationChunk(
417-
type="ChatGenerationChunk",
418-
message=AIMessageChunk(
419-
content="",
420-
additional_kwargs={
421-
"tool_calls": [
422-
{
423-
"index": 0,
424-
"id": "call_BbeyNhCKa6kYLYzrD40NGm3b",
425-
"function": {
426-
"arguments": "",
427-
"name": "get_word_length",
428-
},
429-
"type": "function",
430-
}
431-
]
432-
},
433-
),
434-
),
435-
ChatGenerationChunk(
436-
type="ChatGenerationChunk",
437-
message=AIMessageChunk(
438-
content="",
439-
additional_kwargs={
440-
"tool_calls": [
441-
{
442-
"index": 0,
443-
"id": None,
444-
"function": {
445-
"arguments": '{"word": "eudca"}',
446-
"name": None,
447-
},
448-
"type": None,
449-
}
450-
]
451-
},
452-
),
453-
),
454-
ChatGenerationChunk(
455-
type="ChatGenerationChunk",
456-
message=AIMessageChunk(
457-
content="5",
458-
usage_metadata={
459-
"input_tokens": 142,
460-
"output_tokens": 50,
461-
"total_tokens": 192,
462-
"input_token_details": {"audio": 0, "cache_read": 0},
463-
"output_token_details": {"audio": 0, "reasoning": 0},
464-
},
465-
),
466-
generation_info={"finish_reason": "function_call"},
467-
),
468-
],
469-
[
470-
ChatGenerationChunk(
471-
text="The word eudca has 5 letters.",
472-
type="ChatGenerationChunk",
473-
message=AIMessageChunk(
474-
content="The word eudca has 5 letters.",
475-
usage_metadata={
476-
"input_tokens": 89,
477-
"output_tokens": 28,
478-
"total_tokens": 117,
479-
"input_token_details": {"audio": 0, "cache_read": 0},
480-
"output_token_details": {"audio": 0, "reasoning": 0},
481-
},
482-
),
483-
),
484-
ChatGenerationChunk(
485-
type="ChatGenerationChunk",
486-
generation_info={"finish_reason": "stop"},
487-
message=AIMessageChunk(content=""),
488-
),
489-
],
490-
]
491-
)
492-
llm = MockOpenAI(
493-
model_name="gpt-3.5-turbo",
494-
temperature=0,
495-
openai_api_key="badkey",
496-
)
497-
agent = create_openai_tools_agent(llm, [get_word_length], prompt)
498-
499-
agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True)
500-
501-
with start_transaction():
502-
list(agent_executor.stream({"input": "How many letters in the word eudca"}))
503-
504-
(event,) = events
505-
506-
assert event["contexts"]["trace"]["origin"] == "manual"
507-
for span in event["spans"]:
508-
assert span["origin"] == "auto.ai.langchain"
509-
510-
511403
def test_manual_callback_no_duplication(sentry_init):
512404
"""
513405
Test that when a user manually provides a SentryLangchainCallback,

0 commit comments

Comments (0)