-
Notifications
You must be signed in to change notification settings - Fork 1
Expand file tree
/
Copy pathconftest.py
More file actions
352 lines (266 loc) · 8.84 KB
/
conftest.py
File metadata and controls
352 lines (266 loc) · 8.84 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
"""Shared fixtures and helper functions for integration tests.

Provides fixtures shared by all integration tests:
- Mock LLM Server
- Mock Model
- Mock ToolSet
- Message factory functions
"""
from typing import Any, Dict, List, Optional, Union
from unittest.mock import MagicMock

import pytest

from agentrun.integration.builtin.model import CommonModel
from agentrun.integration.langgraph import AgentRunConverter
from agentrun.integration.utils.tool import CommonToolSet, tool
from agentrun.model.model_proxy import ModelProxy
from agentrun.server.model import AgentEvent, EventType

from .mock_llm_server import MockLLMServer
from .scenarios import Scenarios
# =============================================================================
# 共享的 TestToolSet
# =============================================================================
class SharedTestToolSet(CommonToolSet):
    """Shared toolset used across integration tests.

    Exposes two test tools:
    - weather_lookup: look up a city's weather
    - get_time_now: return a fixed "current" time

    Every tool result is appended to ``call_history`` so tests can
    assert which tools actually ran.
    """

    def __init__(self, timezone: str = "UTC"):
        # Recorded before super().__init__() so tool registration in the
        # base class can never observe missing attributes.
        self.time_zone = timezone
        self.call_history: List[Any] = []
        super().__init__()

    # NOTE: the description string is surfaced to the model at runtime —
    # kept byte-identical on purpose.
    @tool(description="查询城市天气")
    def weather_lookup(self, city: str) -> str:
        report = f"{city} 天气晴朗"
        self.call_history.append(report)
        return report

    # The docstring below may be used by @tool as the tool description,
    # so it is deliberately left unchanged.
    @tool()
    def get_time_now(self) -> dict:
        """返回当前时间"""
        snapshot = {
            "time": "2025-01-02 15:04:05",
            "timezone": self.time_zone,
        }
        self.call_history.append(snapshot)
        return snapshot
# =============================================================================
# Mock 消息工厂函数
# =============================================================================
def create_mock_ai_message(
    content: str = "",
    tool_calls: Optional[List[Dict[str, Any]]] = None,
) -> MagicMock:
    """Create a mock AIMessage object.

    Args:
        content: Message content.
        tool_calls: List of tool calls; ``None`` (the default) yields an
            empty list on the returned message.

    Returns:
        MagicMock: Mock AIMessage with ``content``, ``type`` ("ai") and
        ``tool_calls`` attributes set.
    """
    msg = MagicMock()
    msg.content = content
    msg.type = "ai"
    # `or []` normalizes both None and other falsy values to a fresh list.
    msg.tool_calls = tool_calls or []
    return msg
def create_mock_ai_message_chunk(
    content: str = "",
    tool_call_chunks: Optional[List[Dict]] = None,
) -> MagicMock:
    """Create a mock AIMessageChunk object (streaming output).

    Args:
        content: Content fragment.
        tool_call_chunks: List of tool-call fragments; ``None`` (the
            default) yields an empty list on the returned chunk.

    Returns:
        MagicMock: Mock AIMessageChunk with ``content`` and
        ``tool_call_chunks`` attributes set.
    """
    chunk = MagicMock()
    chunk.content = content
    # Normalize None to an empty list so callers can always iterate.
    chunk.tool_call_chunks = tool_call_chunks or []
    return chunk
def create_mock_tool_message(content: str, tool_call_id: str) -> MagicMock:
    """Build a mock ToolMessage object.

    Args:
        content: Result of the tool execution.
        tool_call_id: ID of the tool call this message answers.

    Returns:
        MagicMock: Mock ToolMessage with ``content``, ``type`` ("tool")
        and ``tool_call_id`` attributes set.
    """
    message = MagicMock()
    message.type = "tool"
    message.content = content
    message.tool_call_id = tool_call_id
    return message
# =============================================================================
# 事件转换辅助函数
# =============================================================================
def convert_and_collect(events: List[Dict]) -> List[Union[str, AgentEvent]]:
    """Run each event through one AgentRunConverter and gather all output.

    Args:
        events: LangChain/LangGraph events to convert.

    Returns:
        List: Every AgentEvent (or string) the converter produced, in order.
    """
    converter = AgentRunConverter()
    return [
        converted
        for event in events
        for converted in converter.to_agui_events(event)
    ]
def filter_agent_events(
    results: List[Union[str, AgentEvent]], event_type: EventType
) -> List[AgentEvent]:
    """Keep only the AgentEvents whose type matches *event_type*.

    Args:
        results: Conversion result list (strings and AgentEvents mixed).
        event_type: The event type to keep.

    Returns:
        List[AgentEvent]: The matching events, in original order.
    """
    matched: List[AgentEvent] = []
    for item in results:
        if not isinstance(item, AgentEvent):
            continue
        if item.event == event_type:
            matched.append(item)
    return matched
def get_event_types(results: List[Union[str, AgentEvent]]) -> List[EventType]:
    """Extract the type of every AgentEvent in *results*.

    Args:
        results: Conversion result list (strings and AgentEvents mixed).

    Returns:
        List[EventType]: Event types in original order; strings are skipped.
    """
    types: List[EventType] = []
    for item in results:
        if isinstance(item, AgentEvent):
            types.append(item.event)
    return types
# =============================================================================
# astream_events 格式的事件工厂
# =============================================================================
def create_on_chat_model_stream_event(chunk: MagicMock) -> Dict:
    """Wrap *chunk* in an ``on_chat_model_stream`` astream_events event.

    Args:
        chunk: The AIMessageChunk object to embed.

    Returns:
        Dict: Event in astream_events format.
    """
    payload = {"chunk": chunk}
    return {"event": "on_chat_model_stream", "data": payload}
def create_on_tool_start_event(
    tool_name: str,
    tool_input: Dict,
    run_id: str = "run-123",
    tool_call_id: Optional[str] = None,
) -> Dict:
    """Create an ``on_tool_start`` event in astream_events format.

    Args:
        tool_name: Tool name.
        tool_input: Tool input arguments.
        run_id: Run ID.
        tool_call_id: Optional tool-call ID; when truthy it is placed in
            the event's ``metadata``.

    Returns:
        Dict: Event in astream_events format.
    """
    event = {
        "event": "on_tool_start",
        "name": tool_name,
        "run_id": run_id,
        "data": {"input": tool_input},
    }
    # Truthiness check kept on purpose: an empty-string ID also omits metadata.
    if tool_call_id:
        event["metadata"] = {"langgraph_tool_call_id": tool_call_id}
    return event
def create_on_tool_end_event(
    output: Any,
    run_id: str = "run-123",
    tool_call_id: Optional[str] = None,
) -> Dict:
    """Create an ``on_tool_end`` event in astream_events format.

    Args:
        output: Tool output value.
        run_id: Run ID.
        tool_call_id: Optional tool-call ID; when truthy it is placed in
            the event's ``metadata``.

    Returns:
        Dict: Event in astream_events format.
    """
    event = {
        "event": "on_tool_end",
        "run_id": run_id,
        "data": {"output": output},
    }
    # Truthiness check kept on purpose: an empty-string ID also omits metadata.
    if tool_call_id:
        event["metadata"] = {"langgraph_tool_call_id": tool_call_id}
    return event
def create_on_tool_error_event(
    error: str,
    run_id: str = "run-123",
) -> Dict:
    """Create an ``on_tool_error`` event in astream_events format.

    Args:
        error: Error message.
        run_id: Run ID.

    Returns:
        Dict: Event in astream_events format.
    """
    payload = {"error": error}
    return {"event": "on_tool_error", "run_id": run_id, "data": payload}
# =============================================================================
# stream_mode 格式的事件工厂
# =============================================================================
def create_stream_updates_event(node_name: str, messages: List) -> Dict:
    """Create an event in ``stream_mode="updates"`` format.

    Args:
        node_name: Graph node name (e.g. "model", "agent", "tools").
        messages: Message list.

    Returns:
        Dict: Event in ``stream_mode="updates"`` format.
    """
    node_state = {"messages": messages}
    return {node_name: node_state}
def create_stream_values_event(messages: List) -> Dict:
    """Create an event in ``stream_mode="values"`` format.

    Args:
        messages: Message list.

    Returns:
        Dict: Event in ``stream_mode="values"`` format.
    """
    return dict(messages=messages)
# =============================================================================
# Pytest Fixtures
# =============================================================================
@pytest.fixture
def ai_message_factory():
    """Fixture exposing the AIMessage factory function."""
    return create_mock_ai_message
@pytest.fixture
def ai_message_chunk_factory():
    """Fixture exposing the AIMessageChunk factory function."""
    return create_mock_ai_message_chunk
@pytest.fixture
def tool_message_factory():
    """Fixture exposing the ToolMessage factory function."""
    return create_mock_tool_message
@pytest.fixture
def shared_mock_server(monkeypatch: Any, respx_mock: Any) -> MockLLMServer:
    """Provide a shared Mock LLM Server pre-loaded with default scenarios.

    The ``respx_mock`` fixture is passed through to MockLLMServer so the
    HTTP mock takes effect consistently in every environment (local/CI).
    """
    mock_server = MockLLMServer(expect_tools=True, validate_tools=False)
    mock_server.install(monkeypatch, respx_mock)
    mock_server.add_default_scenarios()
    return mock_server
@pytest.fixture
def shared_mocked_model(
    shared_mock_server: MockLLMServer, monkeypatch: Any
) -> CommonModel:
    """Provide a shared mock model backed by a stubbed ModelProxy.

    ``ModelClient.get`` is patched to always hand back a fixed proxy so
    ``model("mock-model")`` never touches a real backend.
    """
    from agentrun.integration.builtin.model import model

    proxy = ModelProxy(model_proxy_name="mock-model-proxy")
    monkeypatch.setattr(
        "agentrun.model.client.ModelClient.get",
        lambda *args, **kwargs: proxy,
    )
    return model("mock-model")
@pytest.fixture
def shared_mocked_toolset() -> SharedTestToolSet:
    """Provide a fresh shared test toolset (UTC timezone) per test."""
    return SharedTestToolSet(timezone="UTC")