Skip to content

Commit de4417d

Browse files
committed
fix: omit system parameter when None in AnthropicLlm
When system_instruction is None (e.g. during event compaction via LlmEventSummarizer), the Anthropic API rejects system=None with a 400 Bad Request. Use NOT_GIVEN instead so the parameter is omitted from the API call entirely. Fixes #5318.
1 parent 6380f6a commit de4417d

1 file changed

Lines changed: 11 additions & 3 deletions

File tree

src/google/adk/models/anthropic_llm.py

Lines changed: 11 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -401,11 +401,18 @@ async def generate_content_async(
401401
if llm_request.tools_dict
402402
else NOT_GIVEN
403403
)
404+
# Anthropic API rejects system=None; omit the parameter when no
405+
# system instruction is set (e.g. during event compaction).
406+
system_instruction = (
407+
llm_request.config.system_instruction
408+
if llm_request.config and llm_request.config.system_instruction
409+
else NOT_GIVEN
410+
)
404411

405412
if not stream:
406413
message = await self._anthropic_client.messages.create(
407414
model=model_to_use,
408-
system=llm_request.config.system_instruction,
415+
system=system_instruction,
409416
messages=messages,
410417
tools=tools,
411418
tool_choice=tool_choice,
@@ -414,7 +421,7 @@ async def generate_content_async(
414421
yield message_to_generate_content_response(message)
415422
else:
416423
async for response in self._generate_content_streaming(
417-
llm_request, messages, tools, tool_choice
424+
llm_request, messages, tools, tool_choice, system_instruction
418425
):
419426
yield response
420427

@@ -424,6 +431,7 @@ async def _generate_content_streaming(
424431
messages: list[anthropic_types.MessageParam],
425432
tools: Union[Iterable[anthropic_types.ToolUnionParam], NotGiven],
426433
tool_choice: Union[anthropic_types.ToolChoiceParam, NotGiven],
434+
system_instruction: Union[str, NotGiven] = NOT_GIVEN,
427435
) -> AsyncGenerator[LlmResponse, None]:
428436
"""Handles streaming responses from Anthropic models.
429437
@@ -433,7 +441,7 @@ async def _generate_content_streaming(
433441
model_to_use = self._resolve_model_name(llm_request.model)
434442
raw_stream = await self._anthropic_client.messages.create(
435443
model=model_to_use,
436-
system=llm_request.config.system_instruction,
444+
system=system_instruction,
437445
messages=messages,
438446
tools=tools,
439447
tool_choice=tool_choice,

0 commit comments

Comments (0)