Skip to content

Commit 12ba80b

Browse files
Merge branch 'master' into webb/toxfix
2 parents 32b3057 + c7e394c commit 12ba80b

File tree

2 files changed: +47 additions, −36 deletions

sentry_sdk/integrations/httpx.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,7 @@
2323

2424

2525
try:
26-
from httpx import AsyncClient, Client, Request, Response # type: ignore
26+
from httpx import AsyncClient, Client, Request, Response
2727
except ImportError:
2828
raise DidNotEnable("httpx is not installed")
2929

@@ -94,7 +94,7 @@ def send(self: "Client", request: "Request", **kwargs: "Any") -> "Response":
9494

9595
return rv
9696

97-
Client.send = send
97+
Client.send = send # type: ignore
9898

9999

100100
def _install_httpx_async_client() -> None:
@@ -150,4 +150,4 @@ async def send(
150150

151151
return rv
152152

153-
AsyncClient.send = send
153+
AsyncClient.send = send # type: ignore

tests/integrations/openai_agents/test_openai_agents.py

Lines changed: 44 additions & 33 deletions
Original file line numberDiff line numberDiff line change
@@ -1220,38 +1220,37 @@ def simple_test_tool(message: str) -> str:
12201220
)
12211221
tool_span = next(span for span in spans if span["op"] == OP.GEN_AI_EXECUTE_TOOL)
12221222

1223-
available_tools = [
1224-
{
1225-
"name": "simple_test_tool",
1226-
"description": "A simple tool",
1227-
"params_json_schema": {
1228-
"properties": {"message": {"title": "Message", "type": "string"}},
1229-
"required": ["message"],
1230-
"title": "simple_test_tool_args",
1231-
"type": "object",
1232-
"additionalProperties": False,
1233-
},
1234-
"on_invoke_tool": mock.ANY,
1235-
"strict_json_schema": True,
1236-
"is_enabled": True,
1237-
}
1238-
]
1223+
available_tool = {
1224+
"name": "simple_test_tool",
1225+
"description": "A simple tool",
1226+
"params_json_schema": {
1227+
"properties": {"message": {"title": "Message", "type": "string"}},
1228+
"required": ["message"],
1229+
"title": "simple_test_tool_args",
1230+
"type": "object",
1231+
"additionalProperties": False,
1232+
},
1233+
"on_invoke_tool": mock.ANY,
1234+
"strict_json_schema": True,
1235+
"is_enabled": True,
1236+
}
1237+
12391238
if parse_version(OPENAI_AGENTS_VERSION) >= (0, 3, 3):
1240-
available_tools[0].update(
1239+
available_tool.update(
12411240
{"tool_input_guardrails": None, "tool_output_guardrails": None}
12421241
)
12431242

12441243
if parse_version(OPENAI_AGENTS_VERSION) >= (
12451244
0,
12461245
8,
12471246
):
1248-
available_tools[0]["needs_approval"] = False
1247+
available_tool["needs_approval"] = False
12491248
if parse_version(OPENAI_AGENTS_VERSION) >= (
12501249
0,
12511250
9,
12521251
0,
12531252
):
1254-
available_tools[0].update(
1253+
available_tool.update(
12551254
{
12561255
"timeout_seconds": None,
12571256
"timeout_behavior": "error_as_result",
@@ -1266,10 +1265,12 @@ def simple_test_tool(message: str) -> str:
12661265
assert agent_span["origin"] == "auto.ai.openai_agents"
12671266
assert agent_span["data"]["gen_ai.agent.name"] == "test_agent"
12681267
assert agent_span["data"]["gen_ai.operation.name"] == "invoke_agent"
1269-
assert (
1270-
json.loads(agent_span["data"]["gen_ai.request.available_tools"])
1271-
== available_tools
1272-
)
1268+
1269+
agent_span_available_tool = json.loads(
1270+
agent_span["data"]["gen_ai.request.available_tools"]
1271+
)[0]
1272+
assert all(agent_span_available_tool[k] == v for k, v in available_tool.items())
1273+
12731274
assert agent_span["data"]["gen_ai.request.max_tokens"] == 100
12741275
assert agent_span["data"]["gen_ai.request.model"] == "gpt-4"
12751276
assert agent_span["data"]["gen_ai.request.temperature"] == 0.7
@@ -1280,10 +1281,14 @@ def simple_test_tool(message: str) -> str:
12801281
assert ai_client_span1["data"]["gen_ai.operation.name"] == "chat"
12811282
assert ai_client_span1["data"]["gen_ai.system"] == "openai"
12821283
assert ai_client_span1["data"]["gen_ai.agent.name"] == "test_agent"
1283-
assert (
1284-
json.loads(ai_client_span1["data"]["gen_ai.request.available_tools"])
1285-
== available_tools
1284+
1285+
ai_client_span1_available_tool = json.loads(
1286+
ai_client_span1["data"]["gen_ai.request.available_tools"]
1287+
)[0]
1288+
assert all(
1289+
ai_client_span1_available_tool[k] == v for k, v in available_tool.items()
12861290
)
1291+
12871292
assert ai_client_span1["data"]["gen_ai.request.max_tokens"] == 100
12881293
assert ai_client_span1["data"]["gen_ai.request.messages"] == safe_serialize(
12891294
[
@@ -1323,10 +1328,12 @@ def simple_test_tool(message: str) -> str:
13231328
assert tool_span["description"] == "execute_tool simple_test_tool"
13241329
assert tool_span["data"]["gen_ai.agent.name"] == "test_agent"
13251330
assert tool_span["data"]["gen_ai.operation.name"] == "execute_tool"
1326-
assert (
1327-
json.loads(agent_span["data"]["gen_ai.request.available_tools"])
1328-
== available_tools
1329-
)
1331+
1332+
tool_span_available_tool = json.loads(
1333+
tool_span["data"]["gen_ai.request.available_tools"]
1334+
)[0]
1335+
assert all(tool_span_available_tool[k] == v for k, v in available_tool.items())
1336+
13301337
assert tool_span["data"]["gen_ai.request.max_tokens"] == 100
13311338
assert tool_span["data"]["gen_ai.request.model"] == "gpt-4"
13321339
assert tool_span["data"]["gen_ai.request.temperature"] == 0.7
@@ -1341,10 +1348,14 @@ def simple_test_tool(message: str) -> str:
13411348
assert ai_client_span2["description"] == "chat gpt-4"
13421349
assert ai_client_span2["data"]["gen_ai.agent.name"] == "test_agent"
13431350
assert ai_client_span2["data"]["gen_ai.operation.name"] == "chat"
1344-
assert (
1345-
json.loads(agent_span["data"]["gen_ai.request.available_tools"])
1346-
== available_tools
1351+
1352+
ai_client_span2_available_tool = json.loads(
1353+
ai_client_span2["data"]["gen_ai.request.available_tools"]
1354+
)[0]
1355+
assert all(
1356+
ai_client_span2_available_tool[k] == v for k, v in available_tool.items()
13471357
)
1358+
13481359
assert ai_client_span2["data"]["gen_ai.request.max_tokens"] == 100
13491360
assert ai_client_span2["data"]["gen_ai.request.messages"] == safe_serialize(
13501361
[

0 commit comments

Comments (0)