Skip to content

Commit 81827f0

Browse files
bdqnghi and claude committed
Add Azure OpenAI support (#49)
Add azure-openai as a new provider option, using the AzureOpenAI client from the openai package. Users can configure via --provider azure-openai with --azure-deployment and --api-version options. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent b562a51 commit 81827f0

File tree

5 files changed

+115
-12
lines changed

5 files changed

+115
-12
lines changed

codewiki/cli/commands/config.py

Lines changed: 48 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -85,14 +85,24 @@ def config_group():
8585
)
8686
@click.option(
8787
"--provider",
88-
type=click.Choice(['openai-compatible', 'anthropic', 'bedrock'], case_sensitive=False),
88+
type=click.Choice(['openai-compatible', 'anthropic', 'bedrock', 'azure-openai'], case_sensitive=False),
8989
help="LLM provider type (default: openai-compatible)"
9090
)
9191
@click.option(
9292
"--aws-region",
9393
type=str,
9494
help="AWS region for Bedrock provider (default: us-east-1)"
9595
)
96+
@click.option(
97+
"--api-version",
98+
type=str,
99+
help="Azure OpenAI API version (default: 2024-12-01-preview)"
100+
)
101+
@click.option(
102+
"--azure-deployment",
103+
type=str,
104+
help="Azure OpenAI deployment name"
105+
)
96106
def config_set(
97107
api_key: Optional[str],
98108
base_url: Optional[str],
@@ -104,7 +114,9 @@ def config_set(
104114
max_token_per_leaf_module: Optional[int],
105115
max_depth: Optional[int],
106116
provider: Optional[str] = None,
107-
aws_region: Optional[str] = None
117+
aws_region: Optional[str] = None,
118+
api_version: Optional[str] = None,
119+
azure_deployment: Optional[str] = None
108120
):
109121
"""
110122
Set configuration values for CodeWiki.
@@ -139,7 +151,7 @@ def config_set(
139151
"""
140152
try:
141153
# Check if at least one option is provided
142-
if not any([api_key, base_url, main_model, cluster_model, fallback_model, max_tokens, max_token_per_module, max_token_per_leaf_module, max_depth, provider, aws_region]):
154+
if not any([api_key, base_url, main_model, cluster_model, fallback_model, max_tokens, max_token_per_module, max_token_per_leaf_module, max_depth, provider, aws_region, api_version, azure_deployment]):
143155
click.echo("No options provided. Use --help for usage information.")
144156
sys.exit(EXIT_CONFIG_ERROR)
145157

@@ -187,6 +199,12 @@ def config_set(
187199
if aws_region is not None:
188200
validated_data['aws_region'] = aws_region
189201

202+
if api_version is not None:
203+
validated_data['api_version'] = api_version
204+
205+
if azure_deployment is not None:
206+
validated_data['azure_deployment'] = azure_deployment
207+
190208
# Create config manager and save
191209
manager = ConfigManager()
192210
manager.load() # Load existing config if present
@@ -202,7 +220,9 @@ def config_set(
202220
max_token_per_leaf_module=validated_data.get('max_token_per_leaf_module'),
203221
max_depth=validated_data.get('max_depth'),
204222
provider=validated_data.get('provider'),
205-
aws_region=validated_data.get('aws_region')
223+
aws_region=validated_data.get('aws_region'),
224+
api_version=validated_data.get('api_version'),
225+
azure_deployment=validated_data.get('azure_deployment')
206226
)
207227

208228
# Display success messages
@@ -256,7 +276,13 @@ def config_set(
256276

257277
if aws_region:
258278
click.secho(f"✓ AWS Region: {aws_region}", fg="green")
259-
279+
280+
if api_version:
281+
click.secho(f"✓ API Version: {api_version}", fg="green")
282+
283+
if azure_deployment:
284+
click.secho(f"✓ Azure Deployment: {azure_deployment}", fg="green")
285+
260286
click.echo("\n" + click.style("Configuration updated successfully.", fg="green", bold=True))
261287

262288
except ConfigurationError as e:
@@ -342,6 +368,12 @@ def config_show(output_json: bool):
342368
click.echo(f" Main Model: {config.main_model or 'Not set'}")
343369
click.echo(f" Cluster Model: {config.cluster_model or 'Not set'}")
344370
click.echo(f" Fallback Model: {config.fallback_model or 'Not set'}")
371+
click.echo(f" Provider: {config.provider}")
372+
if config.provider == "bedrock":
373+
click.echo(f" AWS Region: {config.aws_region}")
374+
elif config.provider == "azure-openai":
375+
click.echo(f" API Version: {config.api_version}")
376+
click.echo(f" Azure Deployment: {config.azure_deployment or 'Not set'}")
345377
else:
346378
click.secho(" Not configured", fg="yellow")
347379

@@ -523,7 +555,17 @@ def config_validate(quick: bool, verbose: bool):
523555

524556
try:
525557
base_url_lower = (config.base_url or "").lower()
526-
if "api.anthropic.com" in base_url_lower:
558+
provider = getattr(config, 'provider', 'openai-compatible')
559+
if provider == "azure-openai" or ".openai.azure.com" in base_url_lower:
560+
# Use Azure OpenAI SDK
561+
from openai import AzureOpenAI
562+
client = AzureOpenAI(
563+
api_key=api_key,
564+
api_version=config.api_version,
565+
azure_endpoint=config.base_url,
566+
)
567+
client.models.list()
568+
elif "api.anthropic.com" in base_url_lower:
527569
# Use Anthropic SDK for native Anthropic endpoints
528570
import anthropic
529571
client = anthropic.Anthropic(api_key=api_key)

codewiki/cli/config_manager.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -133,7 +133,9 @@ def save(
133133
max_token_per_leaf_module: Optional[int] = None,
134134
max_depth: Optional[int] = None,
135135
provider: Optional[str] = None,
136-
aws_region: Optional[str] = None
136+
aws_region: Optional[str] = None,
137+
api_version: Optional[str] = None,
138+
azure_deployment: Optional[str] = None
137139
):
138140
"""
139141
Save configuration to file and keyring.
@@ -149,8 +151,10 @@ def save(
149151
max_token_per_module: Maximum tokens per module for clustering
150152
max_token_per_leaf_module: Maximum tokens per leaf module
151153
max_depth: Maximum depth for hierarchical decomposition
152-
provider: LLM provider type (openai-compatible, anthropic, bedrock)
154+
provider: LLM provider type (openai-compatible, anthropic, bedrock, azure-openai)
153155
aws_region: AWS region for Bedrock provider
156+
api_version: Azure OpenAI API version
157+
azure_deployment: Azure OpenAI deployment name
154158
"""
155159
# Ensure config directory exists
156160
try:
@@ -196,6 +200,10 @@ def save(
196200
self._config.provider = provider
197201
if aws_region is not None:
198202
self._config.aws_region = aws_region
203+
if api_version is not None:
204+
self._config.api_version = api_version
205+
if azure_deployment is not None:
206+
self._config.azure_deployment = azure_deployment
199207

200208
# Validate configuration (only if base fields are set)
201209
if self._config.base_url and self._config.main_model and self._config.cluster_model:

codewiki/cli/models/config.py

Lines changed: 11 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -113,8 +113,10 @@ class Configuration:
113113
cluster_model: Model for module clustering
114114
fallback_model: Fallback model for documentation generation
115115
default_output: Default output directory
116-
provider: LLM provider type (openai-compatible, anthropic, bedrock)
116+
provider: LLM provider type (openai-compatible, anthropic, bedrock, azure-openai)
117117
aws_region: AWS region for Bedrock provider
118+
api_version: Azure OpenAI API version
119+
azure_deployment: Azure OpenAI deployment name
118120
max_tokens: Maximum tokens for LLM response (default: 32768)
119121
max_token_per_module: Maximum tokens per module for clustering (default: 36369)
120122
max_token_per_leaf_module: Maximum tokens per leaf module (default: 16000)
@@ -128,6 +130,8 @@ class Configuration:
128130
default_output: str = "docs"
129131
provider: str = "openai-compatible"
130132
aws_region: str = "us-east-1"
133+
api_version: str = "2024-12-01-preview"
134+
azure_deployment: str = ""
131135
max_tokens: int = 32768
132136
max_token_per_module: int = 36369
133137
max_token_per_leaf_module: int = 16000
@@ -155,6 +159,8 @@ def to_dict(self) -> dict:
155159
'default_output': self.default_output,
156160
'provider': self.provider,
157161
'aws_region': self.aws_region,
162+
'api_version': self.api_version,
163+
'azure_deployment': self.azure_deployment,
158164
'max_tokens': self.max_tokens,
159165
'max_token_per_module': self.max_token_per_module,
160166
'max_token_per_leaf_module': self.max_token_per_leaf_module,
@@ -187,6 +193,8 @@ def from_dict(cls, data: dict) -> 'Configuration':
187193
default_output=data.get('default_output', 'docs'),
188194
provider=data.get('provider', 'openai-compatible'),
189195
aws_region=data.get('aws_region', 'us-east-1'),
196+
api_version=data.get('api_version', '2024-12-01-preview'),
197+
azure_deployment=data.get('azure_deployment', ''),
190198
max_tokens=data.get('max_tokens', 32768),
191199
max_token_per_module=data.get('max_token_per_module', 36369),
192200
max_token_per_leaf_module=data.get('max_token_per_leaf_module', 16000),
@@ -243,6 +251,8 @@ def to_backend_config(self, repo_path: str, output_dir: str, api_key: str, runti
243251
fallback_model=self.fallback_model,
244252
provider=self.provider,
245253
aws_region=self.aws_region,
254+
api_version=self.api_version,
255+
azure_deployment=self.azure_deployment,
246256
max_tokens=self.max_tokens,
247257
max_token_per_module=self.max_token_per_module,
248258
max_token_per_leaf_module=self.max_token_per_leaf_module,

codewiki/src/be/llm_services.py

Lines changed: 36 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,7 @@
44
Includes a compatibility layer for OpenAI-compatible API proxies that may
55
return slightly non-standard responses (e.g. choices[].index = None).
66
7-
Supports multiple providers: openai-compatible, anthropic, bedrock.
7+
Supports multiple providers: openai-compatible, anthropic, bedrock, azure-openai.
88
"""
99
import logging
1010
from openai.types import chat
@@ -174,6 +174,9 @@ def call_llm(
174174
if provider in ("bedrock", "anthropic"):
175175
return _call_llm_via_litellm(prompt, config, model, temperature)
176176

177+
if provider == "azure-openai":
178+
return _call_llm_via_azure(prompt, config, model, temperature)
179+
177180
# Default: OpenAI-compatible
178181
client = create_openai_client(config)
179182

@@ -225,3 +228,35 @@ def _call_llm_via_litellm(
225228
api_key=config.llm_api_key if config.provider != "bedrock" else None,
226229
)
227230
return response.choices[0].message.content
231+
232+
233+
def _call_llm_via_azure(
234+
prompt: str,
235+
config: Config,
236+
model: str,
237+
temperature: float = 0.0
238+
) -> str:
239+
"""
240+
Call LLM via Azure OpenAI.
241+
242+
Uses the AzureOpenAI client from the openai package with
243+
azure_endpoint, api_version, and deployment name.
244+
"""
245+
from openai import AzureOpenAI
246+
247+
client = AzureOpenAI(
248+
api_key=config.llm_api_key,
249+
api_version=config.api_version,
250+
azure_endpoint=config.llm_base_url,
251+
)
252+
253+
deployment = config.azure_deployment or model
254+
logger.debug("Calling Azure OpenAI deployment %s (api_version=%s)", deployment, config.api_version)
255+
256+
response = client.chat.completions.create(
257+
model=deployment,
258+
messages=[{"role": "user", "content": prompt}],
259+
temperature=temperature,
260+
max_tokens=config.max_tokens,
261+
)
262+
return response.choices[0].message.content

codewiki/src/config.py

Lines changed: 10 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -58,8 +58,10 @@ class Config:
5858
cluster_model: str
5959
fallback_model: str = FALLBACK_MODEL_1
6060
# Provider configuration
61-
provider: str = "openai-compatible" # openai-compatible, anthropic, bedrock
61+
provider: str = "openai-compatible" # openai-compatible, anthropic, bedrock, azure-openai
6262
aws_region: str = "us-east-1"
63+
api_version: str = "2024-12-01-preview" # Azure OpenAI API version
64+
azure_deployment: str = "" # Azure OpenAI deployment name
6365
# Max token settings
6466
max_tokens: int = DEFAULT_MAX_TOKENS
6567
max_token_per_module: int = DEFAULT_MAX_TOKEN_PER_MODULE
@@ -160,6 +162,8 @@ def from_cli(
160162
fallback_model: str = FALLBACK_MODEL_1,
161163
provider: str = "openai-compatible",
162164
aws_region: str = "us-east-1",
165+
api_version: str = "2024-12-01-preview",
166+
azure_deployment: str = "",
163167
max_tokens: int = DEFAULT_MAX_TOKENS,
164168
max_token_per_module: int = DEFAULT_MAX_TOKEN_PER_MODULE,
165169
max_token_per_leaf_module: int = DEFAULT_MAX_TOKEN_PER_LEAF_MODULE,
@@ -177,8 +181,10 @@ def from_cli(
177181
main_model: Primary model
178182
cluster_model: Clustering model
179183
fallback_model: Fallback model
180-
provider: LLM provider type (openai-compatible, anthropic, bedrock)
184+
provider: LLM provider type (openai-compatible, anthropic, bedrock, azure-openai)
181185
aws_region: AWS region for Bedrock provider
186+
api_version: Azure OpenAI API version
187+
azure_deployment: Azure OpenAI deployment name
182188
max_tokens: Maximum tokens for LLM response
183189
max_token_per_module: Maximum tokens per module for clustering
184190
max_token_per_leaf_module: Maximum tokens per leaf module
@@ -204,6 +210,8 @@ def from_cli(
204210
fallback_model=fallback_model,
205211
provider=provider,
206212
aws_region=aws_region,
213+
api_version=api_version,
214+
azure_deployment=azure_deployment,
207215
max_tokens=max_tokens,
208216
max_token_per_module=max_token_per_module,
209217
max_token_per_leaf_module=max_token_per_leaf_module,

0 commit comments

Comments (0)