Skip to content

Commit 447ec33

Browse files
fix(google-genai): remap finish_reason to tool_calls when response contains tool calls
1 parent 111cf58 commit 447ec33

File tree

3 files changed

+21
-9
lines changed

3 files changed

+21
-9
lines changed

integrations/google_genai/src/haystack_integrations/components/generators/google_genai/chat/utils.py

Lines changed: 14 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -559,10 +559,16 @@ def _convert_google_genai_response_to_chatmessage(response: types.GenerateConten
559559

560560
usage.update(_convert_usage_metadata_to_serializable(usage_metadata))
561561

562+
# Remap finish_reason to "tool_calls" when tool calls are present, since Google GenAI returns
563+
# "STOP" for both normal completions and tool calls (no dedicated FUNCTION_CALL finish reason).
564+
mapped_finish_reason = FINISH_REASON_MAPPING.get(finish_reason or "")
565+
if mapped_finish_reason == "stop" and tool_calls:
566+
mapped_finish_reason = "tool_calls"
567+
562568
# Create meta with reasoning content and thought signatures if available
563569
meta: dict[str, Any] = {
564570
"model": model,
565-
"finish_reason": FINISH_REASON_MAPPING.get(finish_reason or ""),
571+
"finish_reason": mapped_finish_reason,
566572
"usage": usage,
567573
}
568574

@@ -675,13 +681,19 @@ def _convert_google_chunk_to_streaming_chunk(
675681
# Determine the effective content: tool_calls and reasoning take priority.
676682
effective_content = "" if tool_calls or reasoning else content
677683

684+
# Remap finish_reason to "tool_calls" when tool calls are present, since Google GenAI returns
685+
# "STOP" for both normal completions and tool calls (no dedicated FUNCTION_CALL finish reason).
686+
mapped_finish_reason = FINISH_REASON_MAPPING.get(finish_reason or "")
687+
if mapped_finish_reason == "stop" and tool_calls:
688+
mapped_finish_reason = "tool_calls"
689+
678690
return StreamingChunk(
679691
content=effective_content,
680692
tool_calls=tool_calls,
681693
component_info=component_info,
682694
index=index,
683695
start=start,
684-
finish_reason=FINISH_REASON_MAPPING.get(finish_reason or ""),
696+
finish_reason=mapped_finish_reason,
685697
meta=meta,
686698
reasoning=reasoning,
687699
)

integrations/google_genai/tests/test_chat_generator.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -401,7 +401,7 @@ def test_live_run_with_tools_streaming(self, tools):
401401

402402
assert isinstance(tool_message, ChatMessage), "Tool message is not a ChatMessage instance"
403403
assert ChatMessage.is_from(tool_message, ChatRole.ASSISTANT), "Tool message is not from the assistant"
404-
assert tool_message.meta["finish_reason"] == "stop"
404+
assert tool_message.meta["finish_reason"] == "tool_calls"
405405

406406
tool_call = tool_message.tool_calls[0]
407407
assert tool_call.tool_name == "weather"
@@ -425,7 +425,7 @@ def test_live_run_with_toolset(self, tools):
425425
# Google Gen AI (gemini-2.5-flash and gemini-2.5-pro-preview-05-06) does not provide ids for tool calls although
426426
# it is in the response schema, revisit in future to see if there are changes and id is provided
427427
# assert tool_call.id is not None, "Tool call has no id"
428-
assert message.meta["finish_reason"] == "stop"
428+
assert message.meta["finish_reason"] == "tool_calls"
429429

430430
assert tool_call.tool_name == "weather"
431431
assert tool_call.arguments == {"city": "Paris"}
@@ -559,7 +559,7 @@ def test_live_run_with_mixed_tools(self):
559559
assert tool_call.tool_name in ["weather", "population"]
560560
assert "city" in tool_call.arguments
561561
assert tool_call.arguments["city"] in ["Paris", "Berlin"]
562-
assert first_reply.meta["finish_reason"] == "stop"
562+
assert first_reply.meta["finish_reason"] == "tool_calls"
563563

564564
# Mock the response we'd get from ToolInvoker
565565
tool_result_messages = []
@@ -806,7 +806,7 @@ async def test_live_run_async_with_tools(self, tools):
806806
assert len(tool_message.tool_calls) == 1, "Tool message has multiple tool calls"
807807
assert tool_message.tool_calls[0].tool_name == "weather"
808808
assert tool_message.tool_calls[0].arguments == {"city": "Paris"}
809-
assert tool_message.meta["finish_reason"] == "stop"
809+
assert tool_message.meta["finish_reason"] == "tool_calls"
810810

811811
async def test_live_run_async_with_thinking(self):
812812
"""

integrations/google_genai/tests/test_chat_generator_utils.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -286,7 +286,7 @@ def test_convert_google_chunk_to_streaming_chunk_tool_call(self, monkeypatch):
286286
assert chunk.tool_calls[0].tool_name == "weather"
287287
assert chunk.tool_calls[0].arguments == '{"city": "Paris"}'
288288
assert chunk.tool_calls[0].id == "call_123"
289-
assert chunk.finish_reason == "stop"
289+
assert chunk.finish_reason == "tool_calls"
290290
assert chunk.index == 0
291291
assert "received_at" in chunk.meta
292292
assert chunk.component_info == component_info
@@ -332,7 +332,7 @@ def test_convert_google_chunk_to_streaming_chunk_mixed_content(self, monkeypatch
332332
assert len(chunk.tool_calls) == 1
333333
assert chunk.tool_calls[0].tool_name == "weather"
334334
assert chunk.tool_calls[0].arguments == '{"city": "London"}'
335-
assert chunk.finish_reason == "stop"
335+
assert chunk.finish_reason == "tool_calls"
336336
assert chunk.component_info == component_info
337337

338338
def test_convert_google_chunk_to_streaming_chunk_empty_parts(self, monkeypatch):
@@ -508,7 +508,7 @@ def test_convert_google_chunk_to_streaming_chunk_real_example(self, monkeypatch)
508508
assert streaming_chunk.content == ""
509509
assert streaming_chunk.tool_calls is not None
510510
assert len(streaming_chunk.tool_calls) == 6
511-
assert streaming_chunk.finish_reason == "stop"
511+
assert streaming_chunk.finish_reason == "tool_calls"
512512
assert streaming_chunk.index == 2
513513
assert "received_at" in streaming_chunk.meta
514514
assert streaming_chunk.meta["model"] == "gemini-2.5-flash"

0 commit comments

Comments (0)