|
12 | 12 | # See the License for the specific language governing permissions and |
13 | 13 | # limitations under the License. |
14 | 14 |
|
| 15 | +from __future__ import annotations |
| 16 | + |
| 17 | +import builtins |
| 18 | +import importlib.util |
| 19 | +import sys |
| 20 | +from typing import Any |
15 | 21 |
|
16 | 22 | from google.adk.models.anthropic_llm import Claude |
| 23 | +from google.adk.models.base_llm import BaseLlm |
17 | 24 | from google.adk.models.google_llm import Gemini |
18 | | -from google.adk.models.lite_llm import LiteLlm |
19 | 25 | from google.adk.utils.output_schema_utils import can_use_output_schema_with_tools |
20 | 26 | import pytest |
21 | 27 |
|
|
38 | 44 | (Claude(model="claude-3.7-sonnet"), "1", False), |
39 | 45 | (Claude(model="claude-3.7-sonnet"), "0", False), |
40 | 46 | (Claude(model="claude-3.7-sonnet"), None, False), |
41 | | - (LiteLlm(model="openai/gpt-4o"), "1", True), |
42 | | - (LiteLlm(model="openai/gpt-4o"), "0", True), |
43 | | - (LiteLlm(model="openai/gpt-4o"), None, True), |
44 | | - (LiteLlm(model="anthropic/claude-3.7-sonnet"), None, True), |
45 | | - (LiteLlm(model="fireworks_ai/llama-v3p1-70b"), None, True), |
46 | 47 | ], |
47 | 48 | ) |
def test_can_use_output_schema_with_tools(
    monkeypatch: pytest.MonkeyPatch,
    model: str | BaseLlm,
    env_value: str | None,
    expected: bool,
) -> None:
  """Check output-schema-with-tools support per model and Vertex env flag.

  The parametrized cases pair a model with a GOOGLE_GENAI_USE_VERTEXAI
  value (None means the variable must be absent) and the expected result.
  """
  if env_value is None:
    # No value supplied: make sure the variable is not inherited from the
    # surrounding environment.
    monkeypatch.delenv("GOOGLE_GENAI_USE_VERTEXAI", raising=False)
  else:
    monkeypatch.setenv("GOOGLE_GENAI_USE_VERTEXAI", env_value)
  assert can_use_output_schema_with_tools(model) == expected
| 61 | + |
| 62 | + |
def test_can_use_output_schema_with_tools_with_litellm_model() -> None:
  """Test LiteLlm detection when the optional module is available.

  Uses pytest.importorskip, the idiomatic replacement for the manual
  importlib.util.find_spec(...) / pytest.skip(...) pair: it skips both when
  litellm is absent and when it is present but fails to import.
  """
  pytest.importorskip("litellm")

  # Imported lazily so collection of this file never requires litellm.
  from google.adk.models.lite_llm import LiteLlm

  assert can_use_output_schema_with_tools(LiteLlm(model="openai/gpt-4o"))
| 71 | + |
| 72 | + |
def test_can_use_output_schema_with_tools_without_litellm_module(
    monkeypatch: pytest.MonkeyPatch,
) -> None:
  """Ensure a failing optional LiteLlm import leaves other model checks intact."""
  real_import = builtins.__import__

  def _import_blocking_lite_llm(
      name: str,
      globals_dict: dict[str, Any] | None = None,
      locals_dict: dict[str, Any] | None = None,
      fromlist: tuple[str, ...] = (),
      level: int = 0,
  ) -> Any:
    # Everything except the optional lite_llm module imports normally.
    if not name.endswith("lite_llm"):
      return real_import(name, globals_dict, locals_dict, fromlist, level)
    # Simulate an environment where the litellm extra is not installed.
    raise ImportError("litellm not installed")

  monkeypatch.delenv("GOOGLE_GENAI_USE_VERTEXAI", raising=False)
  # Drop any cached module so the patched __import__ is actually exercised.
  monkeypatch.delitem(sys.modules, "google.adk.models.lite_llm", raising=False)
  monkeypatch.setattr(builtins, "__import__", _import_blocking_lite_llm)

  assert not can_use_output_schema_with_tools(Claude(model="claude-3.7-sonnet"))