Skip to content

Commit 76185d5

Browse files
committed
feat: support Azure OpenAI via azure/ prefix
1 parent 4501d69 commit 76185d5

2 files changed

Lines changed: 11 additions & 8 deletions

File tree

agent/core/llm_params.py

Lines changed: 5 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -106,8 +106,8 @@ def _resolve_llm_params(
106106
will reject this; the probe's cascade catches that and falls back
107107
to no thinking.
108108
109-
• ``openai/<model>`` — ``reasoning_effort`` forwarded as a top-level
110-
kwarg (GPT-5 / o-series). LiteLLM uses the user's ``OPENAI_API_KEY``.
109+
• ``openai/<model>`` or ``azure/<model>`` — ``reasoning_effort`` forwarded
110+
as a top-level kwarg. LiteLLM uses the user's API key or Azure credentials.
111111
112112
• Anything else is treated as a HuggingFace router id. We hit the
113113
auto-routing OpenAI-compatible endpoint at
@@ -162,13 +162,14 @@ def _resolve_llm_params(
162162
# the same way, so we leave it off for now.
163163
return {"model": model_name}
164164

165-
if model_name.startswith("openai/"):
165+
if model_name.startswith(("openai/", "azure/")):
166166
params = {"model": model_name}
167167
if reasoning_effort:
168168
if reasoning_effort not in _OPENAI_EFFORTS:
169169
if strict:
170+
provider = "Azure" if model_name.startswith("azure/") else "OpenAI"
170171
raise UnsupportedEffortError(
171-
f"OpenAI doesn't accept effort={reasoning_effort!r}"
172+
f"{provider} doesn't accept effort={reasoning_effort!r}"
172173
)
173174
else:
174175
params["reasoning_effort"] = reasoning_effort

agent/core/model_switcher.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -44,6 +44,7 @@ def is_valid_model_id(model_id: str) -> bool:
4444
Accepts:
4545
• anthropic/<model>
4646
• openai/<model>
47+
• azure/<model>
4748
• <org>/<model>[:<tag>] (HF router; tag = provider or policy)
4849
• huggingface/<org>/<model>[:<tag>] (same, accepts legacy prefix)
4950
@@ -63,10 +64,10 @@ def _print_hf_routing_info(model_id: str, console) -> bool:
6364
proceed with the switch, ``False`` to indicate a hard problem the user
6465
should notice before we fire the effort probe.
6566
66-
Anthropic / OpenAI ids return ``True`` without printing anything —
67+
Anthropic / OpenAI / Azure ids return ``True`` without printing anything —
6768
the probe below covers "does this model exist".
6869
"""
69-
if model_id.startswith(("anthropic/", "openai/")):
70+
if model_id.startswith(("anthropic/", "openai/", "azure/")):
7071
return True
7172

7273
from agent.core import hf_router_catalog as cat
@@ -139,7 +140,7 @@ def print_model_listing(config, console) -> None:
139140
console.print(
140141
"\n[dim]Paste any HF model id (e.g. 'MiniMaxAI/MiniMax-M2.7').\n"
141142
"Add ':fastest', ':cheapest', ':preferred', or ':<provider>' to override routing.\n"
142-
"Use 'anthropic/<model>' or 'openai/<model>' for direct API access.[/dim]"
143+
"Use 'anthropic/<model>', 'openai/<model>', or 'azure/<model>' for direct API access.[/dim]"
143144
)
144145

145146

@@ -149,7 +150,8 @@ def print_invalid_id(arg: str, console) -> None:
149150
"[dim]Expected:\n"
150151
" • <org>/<model>[:tag] (HF router — paste from huggingface.co)\n"
151152
" • anthropic/<model>\n"
152-
" • openai/<model>[/dim]"
153+
" • openai/<model>\n"
154+
" • azure/<model>[/dim]"
153155
)
154156

155157

0 commit comments

Comments (0)