Skip to content

Commit 9ecb2ee

Browse files
authored
Merge pull request #6 from John-Lin/refactor/remove-azure-client
Refactor/remove azure client
2 parents 03751b8 + 21b0b62 commit 9ecb2ee

3 files changed

Lines changed: 33 additions & 75 deletions

File tree

README.md

Lines changed: 4 additions & 12 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ See also: [agentic-telegram-bot](https://github.com/John-Lin/agentic-telegram-bo
99
- Channel @mention and DM support
1010
- Thread-aware conversations (follow-ups stay in the same thread)
1111
- Connects to any MCP server via `servers_config.json`
12-
- Supports OpenAI, Azure OpenAI, and OpenAI-compatible proxy endpoints
12+
- Supports OpenAI and OpenAI-compatible endpoints (including Azure OpenAI v1 API)
1313
- Per-conversation history with automatic truncation
1414

1515
## Install Dependencies
@@ -43,20 +43,12 @@ export OPENAI_API_KEY=""
4343
export OPENAI_MODEL="gpt-5.4"
4444
```
4545

46-
If you are using Azure OpenAI, set these instead:
46+
If you are using Azure OpenAI (v1 API) or another OpenAI-compatible endpoint:
4747

4848
```
49-
export AZURE_OPENAI_API_KEY=""
50-
export AZURE_OPENAI_ENDPOINT="https://<myopenai>.azure.com/"
49+
export OPENAI_API_KEY=""
50+
export OPENAI_BASE_URL="https://<resource-name>.openai.azure.com/openai/v1/"
5151
export OPENAI_MODEL="gpt-5.4"
52-
export OPENAI_API_VERSION="2025-03-01-preview"
53-
```
54-
55-
If you are using an OpenAI-compatible proxy:
56-
57-
```
58-
export OPENAI_PROXY_BASE_URL="https://my-proxy.example.com/v1"
59-
export OPENAI_PROXY_API_KEY=""
6052
```
6153

6254
Optional HTTP proxy for outbound requests:

bot/agent.py

Lines changed: 13 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -11,7 +11,7 @@
1111
from agents.mcp import MCPServerStdio
1212
from agents.mcp import MCPServerStreamableHttp
1313
from agents.models.openai_chatcompletions import OpenAIChatCompletionsModel
14-
from openai import AsyncAzureOpenAI
14+
from agents.models.openai_responses import OpenAIResponsesModel
1515
from openai import AsyncOpenAI
1616

1717
DEFAULT_INSTRUCTIONS = (
@@ -28,32 +28,23 @@
2828
MCP_SESSION_TIMEOUT_SECONDS = 30.0
2929

3030

31-
def _get_model() -> OpenAIChatCompletionsModel:
31+
def _get_model() -> OpenAIResponsesModel | OpenAIChatCompletionsModel:
3232
"""Create an OpenAI model from environment variables.
3333
34-
Client selection priority:
35-
1. Azure OpenAI — when both AZURE_OPENAI_API_KEY and AZURE_OPENAI_ENDPOINT are set.
36-
2. OpenAI-compatible proxy — when OPENAI_PROXY_BASE_URL is set.
37-
3. Default OpenAI — falls back to standard AsyncOpenAI (reads OPENAI_API_KEY).
34+
Uses the standard OpenAI client, which works with both OpenAI and
35+
Azure OpenAI v1 API (via OPENAI_BASE_URL + OPENAI_API_KEY).
36+
37+
OPENAI_API_TYPE controls which API the model uses:
38+
- "responses" (default): OpenAI Responses API — recommended by the SDK
39+
- "chat_completions": Chat Completions API
3840
"""
3941
model_name = os.getenv("OPENAI_MODEL", "gpt-5.4")
42+
api_type = os.getenv("OPENAI_API_TYPE", "responses")
43+
client = AsyncOpenAI()
4044

41-
client: AsyncOpenAI
42-
if os.getenv("AZURE_OPENAI_API_KEY") and os.getenv("AZURE_OPENAI_ENDPOINT"):
43-
client = AsyncAzureOpenAI(
44-
api_key=os.environ["AZURE_OPENAI_API_KEY"],
45-
azure_endpoint=os.environ["AZURE_OPENAI_ENDPOINT"],
46-
api_version=os.getenv("OPENAI_API_VERSION", "2025-04-01-preview"),
47-
)
48-
elif os.getenv("OPENAI_PROXY_BASE_URL"):
49-
client = AsyncOpenAI(
50-
base_url=os.environ["OPENAI_PROXY_BASE_URL"],
51-
api_key=os.getenv("OPENAI_PROXY_API_KEY") or os.getenv("OPENAI_API_KEY", ""),
52-
)
53-
else:
54-
client = AsyncOpenAI()
55-
56-
return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
45+
if api_type == "chat_completions":
46+
return OpenAIChatCompletionsModel(model=model_name, openai_client=client)
47+
return OpenAIResponsesModel(model=model_name, openai_client=client)
5748

5849

5950
class OpenAIAgent:

tests/test_agent.py

Lines changed: 16 additions & 41 deletions
Original file line numberDiff line numberDiff line change
@@ -5,7 +5,8 @@
55

66
import pytest
77
from agents.models.interface import Model
8-
from openai import AsyncAzureOpenAI
8+
from agents.models.openai_chatcompletions import OpenAIChatCompletionsModel
9+
from agents.models.openai_responses import OpenAIResponsesModel
910

1011
from bot.agent import DEFAULT_INSTRUCTIONS
1112
from bot.agent import MAX_TURNS
@@ -162,56 +163,30 @@ def _call_get_model(self, env_vars: dict):
162163
with patch.dict("os.environ", env_vars, clear=True):
163164
return agent_module._get_model()
164165

165-
def test_azure_path_when_both_key_and_endpoint_set(self):
166-
env = {
167-
"AZURE_OPENAI_API_KEY": "test-azure-key",
168-
"AZURE_OPENAI_ENDPOINT": "https://my-resource.openai.azure.com",
169-
}
170-
model = self._call_get_model(env)
171-
assert isinstance(model._client, AsyncAzureOpenAI)
172-
173-
def test_azure_key_without_endpoint_does_not_use_azure(self):
174-
"""When AZURE_OPENAI_API_KEY is set but AZURE_OPENAI_ENDPOINT is missing,
175-
should NOT enter the Azure path (and should not crash)."""
176-
env = {
177-
"AZURE_OPENAI_API_KEY": "test-azure-key",
178-
"OPENAI_API_KEY": "fallback-key",
179-
}
166+
def test_custom_model_name(self):
167+
env = {"OPENAI_MODEL": "gpt-5.2", "OPENAI_API_KEY": "test-key"}
180168
model = self._call_get_model(env)
181-
assert not isinstance(model._client, AsyncAzureOpenAI)
169+
assert model.model == "gpt-5.2"
182170

183-
def test_proxy_path_uses_base_url(self):
184-
env = {
185-
"OPENAI_PROXY_BASE_URL": "https://my-proxy.example.com/v1",
186-
"OPENAI_PROXY_API_KEY": "proxy-key",
187-
}
188-
model = self._call_get_model(env)
189-
assert not isinstance(model._client, AsyncAzureOpenAI)
190-
assert str(model._client.base_url).rstrip("/") == "https://my-proxy.example.com/v1"
191-
192-
def test_proxy_path_without_api_key_uses_openai_key(self):
193-
"""Proxy base URL set but no proxy API key — should fall back to OPENAI_API_KEY."""
194-
env = {
195-
"OPENAI_PROXY_BASE_URL": "https://my-proxy.example.com/v1",
196-
"OPENAI_API_KEY": "fallback-key",
197-
}
171+
def test_default_model_name(self):
172+
env = {"OPENAI_API_KEY": "test-key"}
198173
model = self._call_get_model(env)
199-
assert str(model._client.base_url).rstrip("/") == "https://my-proxy.example.com/v1"
174+
assert model.model == "gpt-5.4"
200175

201-
def test_default_path_uses_vanilla_openai(self):
176+
def test_returns_responses_model_by_default(self):
202177
env = {"OPENAI_API_KEY": "test-key"}
203178
model = self._call_get_model(env)
204-
assert not isinstance(model._client, AsyncAzureOpenAI)
179+
assert isinstance(model, OpenAIResponsesModel)
205180

206-
def test_custom_model_name(self):
207-
env = {"OPENAI_MODEL": "gpt-5.2", "OPENAI_API_KEY": "test-key"}
181+
def test_returns_chat_completions_model_when_api_type_set(self):
182+
env = {"OPENAI_API_KEY": "test-key", "OPENAI_API_TYPE": "chat_completions"}
208183
model = self._call_get_model(env)
209-
assert model.model == "gpt-5.2"
184+
assert isinstance(model, OpenAIChatCompletionsModel)
210185

211-
def test_default_model_name(self):
212-
env = {"OPENAI_API_KEY": "test-key"}
186+
def test_returns_chat_completions_when_api_type_set(self):
187+
env = {"OPENAI_API_KEY": "test-key", "OPENAI_API_TYPE": "chat_completions"}
213188
model = self._call_get_model(env)
214-
assert model.model == "gpt-5.4"
189+
assert isinstance(model, OpenAIChatCompletionsModel)
215190

216191

217192
class TestFromDict:

0 commit comments

Comments (0)