Skip to content

Commit d03311a

Browse files
BV-Venky
and authored
feat: widen openai dependency to support 2.x for litellm compatibility (#1793)
Co-authored-by: BV-Venky <venkateshcjjc@gmail.com>
1 parent b66534b commit d03311a

File tree

1 file changed

+3
-3
lines changed

1 file changed

+3
-3
lines changed

pyproject.toml

Lines changed: 3 additions & 3 deletions
Original file line number · Diff line number · Diff line change
@@ -46,15 +46,15 @@ dependencies = [
4646
[project.optional-dependencies]
4747
anthropic = ["anthropic>=0.21.0,<1.0.0"]
4848
gemini = ["google-genai>=1.32.0,<2.0.0"]
49-
litellm = ["litellm>=1.75.9,<2.0.0", "openai>=1.68.0,<1.110.0"]
49+
litellm = ["litellm>=1.75.9,<2.0.0", "openai>=1.68.0,<3.0.0"]
5050
llamaapi = ["llama-api-client>=0.1.0,<1.0.0"]
5151
mistral = ["mistralai>=1.8.2"]
5252
ollama = ["ollama>=0.4.8,<1.0.0"]
53-
openai = ["openai>=1.68.0,<2.0.0"]
53+
openai = ["openai>=1.68.0,<3.0.0"]
5454
writer = ["writer-sdk>=2.2.0,<3.0.0"]
5555
sagemaker = [
5656
"boto3-stubs[sagemaker-runtime]>=1.26.0,<2.0.0",
57-
"openai>=1.68.0,<2.0.0", # SageMaker uses OpenAI-compatible interface
57+
"openai>=1.68.0,<3.0.0", # SageMaker uses OpenAI-compatible interface
5858
]
5959
otel = ["opentelemetry-exporter-otlp-proto-http>=1.30.0,<2.0.0"]
6060
docs = [

0 commit comments

Comments
 (0)