Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
48 changes: 48 additions & 0 deletions contributing/samples/hello_world_openrouter/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,48 @@
# OpenRouter with LiteLLM

This sample shows how to use an OpenRouter-hosted model with ADK through the
existing LiteLLM model connector.

## Setup

Install ADK with optional integrations so that `LiteLlm` is available:

```bash
pip install "google-adk[extensions]"
```

Set your OpenRouter API key:

```bash
export OPENROUTER_API_KEY="..."
```

Optionally choose a model:

```bash
export OPENROUTER_MODEL="openrouter/openai/gpt-5.2"
```

Run the sample:

```bash
adk run contributing/samples/hello_world_openrouter
```

## Notes

OpenRouter is used here through LiteLLM's OpenAI-compatible routing path:

```python
LiteLlm(
model="openrouter/openai/gpt-5.2",
api_key=os.getenv("OPENROUTER_API_KEY"),
api_base="https://openrouter.ai/api/v1",
)
```

For Gemini models routed through OpenRouter, use OpenRouter model IDs such as
`openrouter/google/gemini-2.5-pro:online`. ADK's built-in Google tools are
optimized for native Gemini model connections, so verify tool compatibility for
the routed model and provider you select.

13 changes: 13 additions & 0 deletions contributing/samples/hello_world_openrouter/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
39 changes: 39 additions & 0 deletions contributing/samples/hello_world_openrouter/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,39 @@
# Copyright 2026 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from __future__ import annotations

import os

from google.adk.agents.llm_agent import Agent
from google.adk.models.lite_llm import LiteLlm

# OpenRouter exposes an OpenAI-compatible endpoint; LiteLLM routes requests
# to it through this base URL.
OPENROUTER_API_BASE = 'https://openrouter.ai/api/v1'

# The routed model can be overridden via the OPENROUTER_MODEL environment
# variable; otherwise this default is used.
OPENROUTER_MODEL = os.getenv('OPENROUTER_MODEL', 'openrouter/openai/gpt-5.2')


# LiteLLM connector configured for OpenRouter. The API key is read from the
# environment at import time; a missing key is surfaced by LiteLLM on the
# first request rather than here.
_openrouter_llm = LiteLlm(
    model=OPENROUTER_MODEL,
    api_key=os.getenv('OPENROUTER_API_KEY'),
    api_base=OPENROUTER_API_BASE,
)

# Entry-point agent discovered by `adk run` for this sample.
root_agent = Agent(
    name='openrouter_agent',
    model=_openrouter_llm,
    description='A simple ADK agent that uses OpenRouter through LiteLLM.',
    instruction=(
        'You are a concise assistant running through OpenRouter. Answer the'
        ' user directly and mention when a question needs current or external'
        ' information.'
    ),
)
14 changes: 13 additions & 1 deletion src/google/adk/utils/model_name_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,8 @@ def extract_model_name(model_string: str) -> str:

Args:
model_string: Either a simple model name like "gemini-2.5-pro" or a
path-based model name like "projects/.../models/gemini-2.5-flash"
path-based model name like "projects/.../models/gemini-2.5-flash",
or a provider-prefixed model name like "gemini/gemini-2.5-flash".

Returns:
The extracted model name (e.g., "gemini-2.5-pro")
Expand All @@ -63,6 +64,17 @@ def extract_model_name(model_string: str) -> str:
if model_string.startswith('models/'):
return model_string[len('models/') :]

if model_string.startswith('projects/'):
return model_string

# Handle provider-prefixed LiteLLM-compatible names like
# "gemini/gemini-2.5-flash" or
# "openrouter/google/gemini-2.5-pro:online".
if '/' in model_string:
model_name = model_string.rsplit('/', 1)[1]
if model_name.startswith('gemini-'):
return model_name

# If it's not a path-based model, return as-is (simple model name)
return model_string

Expand Down
21 changes: 21 additions & 0 deletions tests/unittests/tools/test_google_search_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -250,6 +250,27 @@ async def test_process_llm_request_with_gemini_2_model_and_existing_tools_succee
assert llm_request.config.tools[0] == existing_tool
assert llm_request.config.tools[1].google_search is not None

@pytest.mark.asyncio
async def test_process_llm_request_with_provider_prefixed_gemini_model(
    self,
):
  """Provider-prefixed Gemini models (e.g. OpenRouter IDs) get the tool."""
  search_tool = GoogleSearchTool()
  context = await _create_tool_context()

  request = LlmRequest(
      model='openrouter/google/gemini-2.5-pro:online',
      config=types.GenerateContentConfig(),
  )

  await search_tool.process_llm_request(
      tool_context=context, llm_request=request
  )

  # google_search should be the only tool attached to the request config.
  configured_tools = request.config.tools
  assert configured_tools is not None
  assert len(configured_tools) == 1
  assert configured_tools[0].google_search is not None

@pytest.mark.asyncio
async def test_process_llm_request_with_non_gemini_model_raises_error(self):
"""Test that non-Gemini model raises ValueError."""
Expand Down
41 changes: 41 additions & 0 deletions tests/unittests/utils/test_model_name_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,6 +66,20 @@ def test_extract_model_name_with_models_prefix(self):
assert extract_model_name('models/gemini-2.5-pro') == 'gemini-2.5-pro'
assert extract_model_name('models/gemini-2.5-flash') == 'gemini-2.5-flash'

def test_extract_model_name_provider_prefixed_model(self):
  """Provider-prefixed Gemini names are reduced to the bare model id."""
  cases = {
      'gemini/gemini-2.5-flash': 'gemini-2.5-flash',
      'vertex_ai/gemini-2.5-flash': 'gemini-2.5-flash',
      'openrouter/google/gemini-2.5-pro:online': 'gemini-2.5-pro:online',
      # Non-Gemini routed models are returned unchanged.
      'openrouter/anthropic/claude-sonnet-4': (
          'openrouter/anthropic/claude-sonnet-4'
      ),
  }
  for raw_name, expected in cases.items():
    assert extract_model_name(raw_name) == expected

def test_extract_model_name_invalid_path(self):
"""Test that invalid path formats return the original string."""
invalid_paths = [
Expand Down Expand Up @@ -118,6 +132,13 @@ def test_is_gemini_model_path_based_names(self):
non_gemini_path = 'projects/265104255505/locations/us-central1/publishers/google/models/claude-3-sonnet'
assert is_gemini_model(non_gemini_path) is False

def test_is_gemini_model_provider_prefixed_names(self):
  """Gemini detection handles provider-prefixed model names."""
  expectations = [
      ('gemini/gemini-2.5-flash', True),
      ('vertex_ai/gemini-2.5-flash', True),
      ('openrouter/google/gemini-2.5-pro:online', True),
      # A routed non-Gemini model must not be classified as Gemini.
      ('openrouter/anthropic/claude-sonnet-4', False),
  ]
  for model_id, expected in expectations:
    assert is_gemini_model(model_id) is expected

def test_is_gemini_model_edge_cases(self):
"""Test edge cases for Gemini model detection."""
# Test with None
Expand Down Expand Up @@ -170,6 +191,13 @@ def test_is_gemini_1_model_path_based_names(self):
gemini_2_path = 'projects/265104255505/locations/us-central1/publishers/google/models/gemini-2.5-flash'
assert is_gemini_1_model(gemini_2_path) is False

def test_is_gemini_1_model_provider_prefixed_names(self):
  """Gemini 1.x detection handles provider-prefixed model names."""
  expectations = [
      ('gemini/gemini-1.5-flash', True),
      ('vertex_ai/gemini-1.5-flash', True),
      ('openrouter/google/gemini-1.5-pro:online', True),
      # A 2.x model must not be classified as Gemini 1.x.
      ('openrouter/google/gemini-2.5-pro', False),
  ]
  for model_id, expected in expectations:
    assert is_gemini_1_model(model_id) is expected

def test_is_gemini_1_model_edge_cases(self):
"""Test edge cases for Gemini 1.x model detection."""
# Test with None
Expand Down Expand Up @@ -217,6 +245,17 @@ def test_is_gemini_2_or_above_path_based_names(self):
gemini_3_path = 'projects/12345/locations/us-east1/publishers/google/models/gemini-3.0-pro'
assert is_gemini_2_or_above(gemini_3_path) is True

def test_is_gemini_2_or_above_provider_prefixed_names(self):
  """Gemini 2.0+ detection handles provider-prefixed model names."""
  expectations = [
      ('gemini/gemini-2.5-flash', True),
      ('vertex_ai/gemini-2.5-flash', True),
      ('openrouter/google/gemini-2.5-pro:online', True),
      # A 1.x model must not be classified as 2.0 or above.
      ('openrouter/google/gemini-1.5-pro:online', False),
  ]
  for model_id, expected in expectations:
    assert is_gemini_2_or_above(model_id) is expected

def test_is_gemini_2_or_above_edge_cases(self):
"""Test edge cases for Gemini 2.0+ model detection."""
# Test with None
Expand Down Expand Up @@ -245,6 +284,8 @@ def test_model_classification_consistency(self):
'gemini-2.5-flash',
'gemini-2.5-pro',
'gemini-3.0-pro',
'gemini/gemini-2.5-flash',
'openrouter/google/gemini-2.5-pro:online',
'projects/123/locations/us-central1/publishers/google/models/gemini-1.5-pro',
'projects/123/locations/us-central1/publishers/google/models/gemini-2.5-flash',
'projects/123/locations/us-central1/publishers/google/models/gemini-3.0-pro',
Expand Down
Loading