Skip to content

Commit a4ed760

Browse files
test(langchain): Replace mocks with httpx types
1 parent fbde33f commit a4ed760

File tree

2 files changed

+202
-74
lines changed

2 files changed

+202
-74
lines changed

tests/conftest.py

Lines changed: 6 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1018,10 +1018,14 @@ async def inner(values):
10181018

10191019
@pytest.fixture
10201020
def server_side_event_chunks():
1021-
def inner(events):
1021+
def inner(events, include_event_type=True):
10221022
for event in events:
10231023
payload = event.model_dump()
1024-
chunk = f"event: {payload['type']}\ndata: {json.dumps(payload)}\n\n"
1024+
chunk = (
1025+
f"event: {payload['type']}\ndata: {json.dumps(payload)}\n\n"
1026+
if include_event_type
1027+
else f"data: {json.dumps(payload)}\n\n"
1028+
)
10251029
yield chunk.encode("utf-8")
10261030

10271031
return inner

tests/integrations/langchain/test_langchain.py

Lines changed: 196 additions & 72 deletions
Original file line numberDiff line numberDiff line change
@@ -40,6 +40,21 @@
4040
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
4141

4242

43+
from openai.types.chat.chat_completion_chunk import (
44+
ChatCompletionChunk,
45+
Choice,
46+
ChoiceDelta,
47+
ChoiceDeltaToolCall,
48+
ChoiceDeltaToolCallFunction,
49+
)
50+
51+
from openai.types.completion_usage import (
52+
CompletionTokensDetails,
53+
CompletionUsage,
54+
PromptTokensDetails,
55+
)
56+
57+
4358
@tool
4459
def get_word_length(word: str) -> int:
4560
"""Returns the length of a word."""
@@ -95,6 +110,8 @@ def test_langchain_agent(
95110
use_unknown_llm_type,
96111
system_instructions_content,
97112
request,
113+
get_model_response,
114+
server_side_event_chunks,
98115
):
99116
global llm_type
100117
llm_type = "acme-llm" if use_unknown_llm_type else "openai-chat"
@@ -120,87 +137,189 @@ def test_langchain_agent(
120137
MessagesPlaceholder(variable_name="agent_scratchpad"),
121138
]
122139
)
123-
global stream_result_mock
124-
stream_result_mock = Mock(
125-
side_effect=[
140+
141+
tool_response = get_model_response(
142+
server_side_event_chunks(
126143
[
127-
ChatGenerationChunk(
128-
type="ChatGenerationChunk",
129-
message=AIMessageChunk(
130-
content="",
131-
additional_kwargs={
132-
"tool_calls": [
133-
{
134-
"index": 0,
135-
"id": "call_BbeyNhCKa6kYLYzrD40NGm3b",
136-
"function": {
137-
"arguments": "",
138-
"name": "get_word_length",
139-
},
140-
"type": "function",
141-
}
142-
]
143-
},
144-
),
144+
ChatCompletionChunk(
145+
id="chatcmpl-turn-1",
146+
object="chat.completion.chunk",
147+
created=10000000,
148+
model="gpt-3.5-turbo",
149+
choices=[
150+
Choice(
151+
index=0,
152+
delta=ChoiceDelta(role="assistant"),
153+
finish_reason=None,
154+
),
155+
],
145156
),
146-
ChatGenerationChunk(
147-
type="ChatGenerationChunk",
148-
message=AIMessageChunk(
149-
content="",
150-
additional_kwargs={
151-
"tool_calls": [
152-
{
153-
"index": 0,
154-
"id": None,
155-
"function": {
156-
"arguments": '{"word": "eudca"}',
157-
"name": None,
158-
},
159-
"type": None,
160-
}
161-
]
162-
},
163-
),
157+
ChatCompletionChunk(
158+
id="chatcmpl-turn-1",
159+
object="chat.completion.chunk",
160+
created=10000000,
161+
model="gpt-3.5-turbo",
162+
choices=[
163+
Choice(
164+
index=0,
165+
delta=ChoiceDelta(
166+
tool_calls=[
167+
ChoiceDeltaToolCall(
168+
index=0,
169+
id="call_BbeyNhCKa6kYLYzrD40NGm3b",
170+
type="function",
171+
function=ChoiceDeltaToolCallFunction(
172+
name="get_word_length",
173+
arguments="",
174+
),
175+
),
176+
],
177+
),
178+
finish_reason=None,
179+
),
180+
],
164181
),
165-
ChatGenerationChunk(
166-
type="ChatGenerationChunk",
167-
message=AIMessageChunk(
168-
content="5",
169-
usage_metadata={
170-
"input_tokens": 142,
171-
"output_tokens": 50,
172-
"total_tokens": 192,
173-
"input_token_details": {"audio": 0, "cache_read": 0},
174-
"output_token_details": {"audio": 0, "reasoning": 0},
175-
},
182+
ChatCompletionChunk(
183+
id="chatcmpl-turn-1",
184+
object="chat.completion.chunk",
185+
created=10000000,
186+
model="gpt-3.5-turbo",
187+
choices=[
188+
Choice(
189+
index=0,
190+
delta=ChoiceDelta(
191+
tool_calls=[
192+
ChoiceDeltaToolCall(
193+
index=0,
194+
function=ChoiceDeltaToolCallFunction(
195+
arguments='{"word": "eudca"}',
196+
),
197+
),
198+
],
199+
),
200+
finish_reason=None,
201+
),
202+
],
203+
),
204+
ChatCompletionChunk(
205+
id="chatcmpl-turn-1",
206+
object="chat.completion.chunk",
207+
created=10000000,
208+
model="gpt-3.5-turbo",
209+
choices=[
210+
Choice(
211+
index=0,
212+
delta=ChoiceDelta(content="5"),
213+
finish_reason=None,
214+
),
215+
],
216+
),
217+
ChatCompletionChunk(
218+
id="chatcmpl-turn-1",
219+
object="chat.completion.chunk",
220+
created=10000000,
221+
model="gpt-3.5-turbo",
222+
choices=[
223+
Choice(
224+
index=0,
225+
delta=ChoiceDelta(),
226+
finish_reason="function_call",
227+
),
228+
],
229+
),
230+
ChatCompletionChunk(
231+
id="chatcmpl-turn-1",
232+
object="chat.completion.chunk",
233+
created=10000000,
234+
model="gpt-3.5-turbo",
235+
choices=[],
236+
usage=CompletionUsage(
237+
prompt_tokens=142,
238+
completion_tokens=50,
239+
total_tokens=192,
240+
prompt_tokens_details=PromptTokensDetails(
241+
audio_tokens=0,
242+
cached_tokens=0,
243+
),
244+
completion_tokens_details=CompletionTokensDetails(
245+
audio_tokens=0,
246+
reasoning_tokens=0,
247+
),
176248
),
177-
generation_info={"finish_reason": "function_call"},
178249
),
179250
],
251+
include_event_type=False,
252+
)
253+
)
254+
255+
final_response = get_model_response(
256+
server_side_event_chunks(
180257
[
181-
ChatGenerationChunk(
182-
text="The word eudca has 5 letters.",
183-
type="ChatGenerationChunk",
184-
message=AIMessageChunk(
185-
content="The word eudca has 5 letters.",
186-
usage_metadata={
187-
"input_tokens": 89,
188-
"output_tokens": 28,
189-
"total_tokens": 117,
190-
"input_token_details": {"audio": 0, "cache_read": 0},
191-
"output_token_details": {"audio": 0, "reasoning": 0},
192-
},
193-
),
258+
ChatCompletionChunk(
259+
id="chatcmpl-turn-2",
260+
object="chat.completion.chunk",
261+
created=10000000,
262+
model="gpt-3.5-turbo",
263+
choices=[
264+
Choice(
265+
index=0,
266+
delta=ChoiceDelta(role="assistant"),
267+
finish_reason=None,
268+
),
269+
],
194270
),
195-
ChatGenerationChunk(
196-
type="ChatGenerationChunk",
197-
generation_info={"finish_reason": "stop"},
198-
message=AIMessageChunk(content=""),
271+
ChatCompletionChunk(
272+
id="chatcmpl-turn-2",
273+
object="chat.completion.chunk",
274+
created=10000000,
275+
model="gpt-3.5-turbo",
276+
choices=[
277+
Choice(
278+
index=0,
279+
delta=ChoiceDelta(content="The word eudca has 5 letters."),
280+
finish_reason=None,
281+
),
282+
],
283+
),
284+
ChatCompletionChunk(
285+
id="chatcmpl-turn-2",
286+
object="chat.completion.chunk",
287+
created=10000000,
288+
model="gpt-3.5-turbo",
289+
choices=[
290+
Choice(
291+
index=0,
292+
delta=ChoiceDelta(),
293+
finish_reason="stop",
294+
),
295+
],
296+
),
297+
ChatCompletionChunk(
298+
id="chatcmpl-turn-2",
299+
object="chat.completion.chunk",
300+
created=10000000,
301+
model="gpt-3.5-turbo",
302+
choices=[],
303+
usage=CompletionUsage(
304+
prompt_tokens=89,
305+
completion_tokens=28,
306+
total_tokens=117,
307+
prompt_tokens_details=PromptTokensDetails(
308+
audio_tokens=0,
309+
cached_tokens=0,
310+
),
311+
completion_tokens_details=CompletionTokensDetails(
312+
audio_tokens=0,
313+
reasoning_tokens=0,
314+
),
315+
),
199316
),
200317
],
201-
]
318+
include_event_type=False,
319+
)
202320
)
203-
llm = MockOpenAI(
321+
322+
llm = ChatOpenAI(
204323
model_name="gpt-3.5-turbo",
205324
temperature=0,
206325
openai_api_key="badkey",
@@ -209,8 +328,13 @@ def test_langchain_agent(
209328

210329
agent_executor = AgentExecutor(agent=agent, tools=[get_word_length], verbose=True)
211330

212-
with start_transaction():
213-
list(agent_executor.stream({"input": "How many letters in the word eudca"}))
331+
with patch.object(
332+
llm.root_client._client,
333+
"send",
334+
side_effect=[tool_response, final_response],
335+
) as _:
336+
with start_transaction():
337+
list(agent_executor.stream({"input": "How many letters in the word eudca"}))
214338

215339
tx = events[0]
216340
assert tx["type"] == "transaction"

0 commit comments

Comments (0)