-
Notifications
You must be signed in to change notification settings - Fork 646
Feat/add litellm plugin #546
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
9c8ae4c
3f3d09d
842ab82
2cb8625
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,24 @@ | ||
| # vision-agents-plugins-litellm | ||
|
|
||
| LiteLLM plugin for [Vision Agents](https://github.com/GetStream/Vision-Agents), enabling access to 100+ LLM providers through a single unified interface. | ||
|
|
||
| ## Installation | ||
|
|
||
| ```bash | ||
| pip install vision-agents-plugins-litellm | ||
| ``` | ||
|
|
||
| ## Usage | ||
|
|
||
| ```python | ||
| from vision_agents.plugins.litellm import LiteLLMChatCompletions | ||
|
|
||
| # Use any litellm model string | ||
| llm = LiteLLMChatCompletions(model="anthropic/claude-sonnet-4-20250514") | ||
| llm = LiteLLMChatCompletions(model="azure/gpt-4o", api_key="...") | ||
| llm = LiteLLMChatCompletions(model="bedrock/anthropic.claude-3-haiku") | ||
| ``` | ||
|
|
||
| LiteLLM reads provider API keys from environment variables automatically (`ANTHROPIC_API_KEY`, `OPENAI_API_KEY`, etc.). | ||
|
|
||
| See https://docs.litellm.ai/docs/providers for all supported models. |
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,27 @@ | ||
| [build-system] | ||
| requires = ["hatchling", "hatch-vcs"] | ||
| build-backend = "hatchling.build" | ||
|
|
||
| [project] | ||
| name = "vision-agents-plugins-litellm" | ||
| dynamic = ["version"] | ||
| description = "LiteLLM plugin for Vision Agents - access 100+ LLM providers" | ||
| readme = "README.md" | ||
| requires-python = ">=3.10" | ||
| license = "MIT" | ||
| dependencies = [ | ||
| "vision-agents", | ||
| "litellm>=1.60.0,<2.0.0", | ||
| ] | ||
|
|
||
| [project.urls] | ||
| Documentation = "https://visionagents.ai/" | ||
| Website = "https://visionagents.ai/" | ||
| Source = "https://github.com/GetStream/Vision-Agents" | ||
|
|
||
| [tool.hatch.version] | ||
| source = "vcs" | ||
| raw-options = { root = "..", search_parent_directories = true, fallback_version = "0.0.0" } | ||
|
|
||
| [tool.hatch.build.targets.wheel] | ||
| packages = ["vision_agents"] | ||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,98 @@ | ||
| """Tests for LiteLLM plugin.""" | ||
|
|
||
| import ast | ||
| from pathlib import Path | ||
|
|
||
| import pytest | ||
|
|
||
| PLUGIN_PATH = ( | ||
| Path(__file__).resolve().parents[1] | ||
| / "vision_agents" | ||
| / "plugins" | ||
| / "litellm" | ||
| / "litellm_llm.py" | ||
| ) | ||
|
|
||
|
|
||
| class TestLiteLLMPluginStructure: | ||
| def _parse(self): | ||
| return ast.parse(PLUGIN_PATH.read_text()) | ||
|
|
||
| def test_file_exists(self): | ||
| assert PLUGIN_PATH.exists() | ||
|
|
||
| def test_has_litellm_chat_completions_class(self): | ||
| tree = self._parse() | ||
| classes = [n.name for n in ast.walk(tree) if isinstance(n, ast.ClassDef)] | ||
| assert "LiteLLMChatCompletions" in classes | ||
|
|
||
| def test_inherits_llm(self): | ||
| tree = self._parse() | ||
| for node in ast.walk(tree): | ||
| if isinstance(node, ast.ClassDef) and node.name == "LiteLLMChatCompletions": | ||
| base_names = [b.id for b in node.bases if isinstance(b, ast.Name)] | ||
| assert "LLM" in base_names | ||
| return | ||
| pytest.fail("LiteLLMChatCompletions not found") | ||
|
|
||
| def test_has_simple_response(self): | ||
| tree = self._parse() | ||
| for node in ast.walk(tree): | ||
| if isinstance(node, ast.ClassDef) and node.name == "LiteLLMChatCompletions": | ||
| methods = [ | ||
| n.name for n in node.body if isinstance(n, ast.AsyncFunctionDef) | ||
| ] | ||
| assert "simple_response" in methods | ||
| assert "create_response" in methods | ||
| return | ||
|
|
||
| def test_has_streaming_handler(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert "_handle_streaming" in src | ||
| assert "_handle_non_streaming" in src | ||
|
|
||
| def test_uses_drop_params_true(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert '"drop_params": True' in src | ||
|
|
||
| def test_uses_litellm_acompletion(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert "litellm.acompletion" in src | ||
|
|
||
| def test_emits_events(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert "LLMRequestStartedEvent" in src | ||
| assert "LLMResponseChunkEvent" in src | ||
| assert "LLMResponseCompletedEvent" in src | ||
|
|
||
| def test_plugin_name(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert 'PLUGIN_NAME = "litellm"' in src | ||
|
|
||
| def test_converts_tools_to_provider_format(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert "_convert_tools_to_provider_format" in src | ||
|
|
||
| def test_extracts_tool_calls(self): | ||
| src = PLUGIN_PATH.read_text() | ||
| assert "_extract_tool_calls_from_response" in src | ||
|
|
||
|
|
||
| class TestPluginPackage: | ||
| def test_pyproject_exists(self): | ||
| pyproject = Path(__file__).resolve().parents[1] / "pyproject.toml" | ||
| assert pyproject.exists() | ||
|
|
||
| def test_litellm_in_dependencies(self): | ||
| pyproject = (Path(__file__).resolve().parents[1] / "pyproject.toml").read_text() | ||
| assert "litellm" in pyproject | ||
|
|
||
| def test_init_exports_class(self): | ||
| init = ( | ||
| Path(__file__).resolve().parents[1] | ||
| / "vision_agents" | ||
| / "plugins" | ||
| / "litellm" | ||
| / "__init__.py" | ||
| ).read_text() | ||
| assert "LiteLLMChatCompletions" in init | ||
|
Comment on lines
+17
to
+98
There was a problem hiding this comment. Choose a reason for hiding this comment. The reason will be displayed to describe this comment to others. Learn more. 🛠️ Refactor suggestion | 🟠 Major | 🏗️ Heavy lift Replace structure checks with behavioral tests. All tests verify code structure (AST parsing, string presence) rather than testing actual behavior. This violates the coding guideline: "ALWAYS test behavior, not calling a path. Assert on outputs and state, not method calls." Tests should instantiate the plugin class and assert on its outputs and state rather than on the presence of strings in the source file.
Structure checks don't verify correctness and will pass even if the implementation is broken. As per coding guidelines: "ALWAYS test behavior, not calling a path. Assert on outputs and state, not method calls." |
||
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,14 @@ | ||
| """LiteLLM plugin for Vision Agents. | ||
|
|
||
| Routes to 100+ LLM providers (OpenAI, Anthropic, Google, Azure, Bedrock, | ||
| Ollama, etc.) via the litellm SDK. No proxy server needed. | ||
|
|
||
| Model strings use the provider/model format, e.g. | ||
| anthropic/claude-sonnet-4-20250514, azure/gpt-4o, openai/gpt-4o. | ||
|
|
||
| See https://docs.litellm.ai/docs/providers for all supported models. | ||
| """ | ||
|
|
||
| from .litellm_llm import LiteLLMChatCompletions | ||
|
|
||
| __all__ = ["LiteLLMChatCompletions"] |
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
🧩 Analysis chain
🏁 Script executed:
Repository: GetStream/Vision-Agents
Length of output: 1354
Update litellm constraint to exclude CRITICAL and HIGH severity vulnerabilities.
The constraint `litellm>=1.60.0,<2.0.0` allows vulnerable versions. Restrict to a patched version or narrow the range to exclude affected versions.