1- import contextvars
21import itertools
3- import sys
42import json
53import warnings
64from collections import OrderedDict
2624if TYPE_CHECKING :
2725 from typing import (
2826 Any ,
29- AsyncIterator ,
3027 Callable ,
3128 Dict ,
32- Iterator ,
3329 List ,
3430 Optional ,
3531 Union ,
@@ -153,44 +149,6 @@ def _transform_langchain_message_content(content: "Any") -> "Any":
153149 return content
154150
155151
# Stack of agent names kept in a contextvar so re-entrant (nested) agent
# invocations can each report the innermost active agent, with isolation
# across async tasks.
_agent_stack: "contextvars.ContextVar[Optional[List[Optional[str]]]]" = (
    contextvars.ContextVar("langchain_agent_stack", default=None)
)


def _push_agent(agent_name: "Optional[str]") -> None:
    """Record *agent_name* as the innermost active agent.

    A brand-new list is stored on every push so sibling async contexts,
    which may still hold a reference to the previous list, never observe
    this mutation.
    """
    previous = _agent_stack.get()
    updated = list(previous) if previous is not None else []
    updated.append(agent_name)
    _agent_stack.set(updated)
173-
def _pop_agent() -> "Optional[str]":
    """Remove and return the innermost agent name, or ``None`` when empty.

    The stored list is never mutated in place; a trimmed copy replaces it
    so concurrent async contexts keep their own view of the stack.
    """
    current = _agent_stack.get()
    if not current:
        return None
    trimmed = list(current)
    name = trimmed.pop()
    _agent_stack.set(trimmed)
    return name
184-
185-
def _get_current_agent() -> "Optional[str]":
    """Peek at the innermost agent name without removing it."""
    current = _agent_stack.get()
    return current[-1] if current else None
192-
193-
194152def _get_system_instructions (messages : "List[List[BaseMessage]]" ) -> "List[str]" :
195153 system_instructions = []
196154
@@ -455,10 +413,6 @@ def on_chat_model_start(
455413 elif "openai" in ai_type :
456414 span .set_data (SPANDATA .GEN_AI_SYSTEM , "openai" )
457415
458- agent_name = _get_current_agent ()
459- if agent_name :
460- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
461-
462416 for key , attribute in DATA_FIELDS .items ():
463417 if key in all_params and all_params [key ] is not None :
464418 set_data_normalized (span , attribute , all_params [key ], unpack = False )
@@ -655,10 +609,6 @@ def on_tool_start(
655609 if tool_description is not None :
656610 span .set_data (SPANDATA .GEN_AI_TOOL_DESCRIPTION , tool_description )
657611
658- agent_name = _get_current_agent ()
659- if agent_name :
660- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
661-
662612 if should_send_default_pii () and self .include_prompts :
663613 set_data_normalized (
664614 span ,
@@ -985,50 +935,45 @@ def new_invoke(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
985935 name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
986936 origin = LangchainIntegration .origin ,
987937 ) as span :
988- _push_agent (agent_name )
989- try :
990- if agent_name :
991- span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
992-
993- span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
994- span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
995-
996- _set_tools_on_span (span , tools )
997-
998- # Run the agent
999- result = f (self , * args , ** kwargs )
1000-
1001- input = result .get ("input" )
1002- if (
1003- input is not None
1004- and should_send_default_pii ()
1005- and integration .include_prompts
1006- ):
1007- normalized_messages = normalize_message_roles ([input ])
1008- scope = sentry_sdk .get_current_scope ()
1009- messages_data = truncate_and_annotate_messages (
1010- normalized_messages , span , scope
938+ if agent_name :
939+ span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
940+
941+ span .set_data (SPANDATA .GEN_AI_OPERATION_NAME , "invoke_agent" )
942+ span .set_data (SPANDATA .GEN_AI_RESPONSE_STREAMING , False )
943+
944+ _set_tools_on_span (span , tools )
945+
946+ # Run the agent
947+ result = f (self , * args , ** kwargs )
948+
949+ input = result .get ("input" )
950+ if (
951+ input is not None
952+ and should_send_default_pii ()
953+ and integration .include_prompts
954+ ):
955+ normalized_messages = normalize_message_roles ([input ])
956+ scope = sentry_sdk .get_current_scope ()
957+ messages_data = truncate_and_annotate_messages (
958+ normalized_messages , span , scope
959+ )
960+ if messages_data is not None :
961+ set_data_normalized (
962+ span ,
963+ SPANDATA .GEN_AI_REQUEST_MESSAGES ,
964+ messages_data ,
965+ unpack = False ,
1011966 )
1012- if messages_data is not None :
1013- set_data_normalized (
1014- span ,
1015- SPANDATA .GEN_AI_REQUEST_MESSAGES ,
1016- messages_data ,
1017- unpack = False ,
1018- )
1019967
1020- output = result .get ("output" )
1021- if (
1022- output is not None
1023- and should_send_default_pii ()
1024- and integration .include_prompts
1025- ):
1026- set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
968+ output = result .get ("output" )
969+ if (
970+ output is not None
971+ and should_send_default_pii ()
972+ and integration .include_prompts
973+ ):
974+ set_data_normalized (span , SPANDATA .GEN_AI_RESPONSE_TEXT , output )
1027975
1028- return result
1029- finally :
1030- # Ensure agent is popped even if an exception occurs
1031- _pop_agent ()
976+ return result
1032977
1033978 return new_invoke
1034979
@@ -1045,13 +990,11 @@ def new_stream(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
1045990
1046991 span = start_span_function (
1047992 op = OP .GEN_AI_INVOKE_AGENT ,
1048- name = f"invoke_agent { agent_name } " if agent_name else "invoke_agent" ,
993+ name = f"invoke_agent { agent_name } " . strip () ,
1049994 origin = LangchainIntegration .origin ,
1050995 )
1051996 span .__enter__ ()
1052997
1053- _push_agent (agent_name )
1054-
1055998 if agent_name :
1056999 span .set_data (SPANDATA .GEN_AI_AGENT_NAME , agent_name )
10571000
@@ -1084,57 +1027,43 @@ def new_stream(self: "Any", *args: "Any", **kwargs: "Any") -> "Any":
10841027
10851028 old_iterator = result
10861029
def new_iterator():
    # type: () -> Iterator[Any]
    """Re-yield every event from the wrapped iterator, then close the span.

    ``span`` was entered manually (``span.__enter__()``) and must be exited
    exactly once.  Without the except-clause below, an exception raised by
    ``old_iterator`` — or the consumer abandoning this generator, which
    raises ``GeneratorExit`` at the ``yield`` — would leak the span.
    """
    try:
        for event in old_iterator:
            yield event
    except BaseException:
        # Close the span with the active exception info before propagating.
        span.__exit__(*sys.exc_info())
        raise

    try:
        # `event` is unbound when the iterator yielded nothing; the broad
        # except also covers events without a dict-like interface.
        output = event.get("output")
    except Exception:
        output = None

    if (
        output is not None
        and should_send_default_pii()
        and integration.include_prompts
    ):
        set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)

    span.__exit__(None, None, None)
async def new_iterator_async():
    # type: () -> AsyncIterator[Any]
    """Async twin of ``new_iterator``: forward events, then close the span.

    The manually-entered ``span`` must be exited exactly once, so iterator
    failures (and consumer abandonment, surfacing as ``GeneratorExit``)
    close it with the exception info before re-raising.
    """
    try:
        async for event in old_iterator:
            yield event
    except BaseException:
        # Close the span with the active exception info before propagating.
        span.__exit__(*sys.exc_info())
        raise

    try:
        # `event` is unbound when the iterator yielded nothing; the broad
        # except also covers events without a dict-like interface.
        output = event.get("output")
    except Exception:
        output = None

    if (
        output is not None
        and should_send_default_pii()
        and integration.include_prompts
    ):
        set_data_normalized(span, SPANDATA.GEN_AI_RESPONSE_TEXT, output)

    span.__exit__(None, None, None)
11381067
11391068 if str (type (result )) == "<class 'async_generator'>" :
11401069 result = new_iterator_async ()
0 commit comments