Skip to content

Commit fcdb725

Browse files
devatsecureclaude authored and committed
feat: Add proxy support and explicit model override for LLM pipeline
- Add ANTHROPIC_BASE_URL env var support in LLMManager and AnthropicProvider — enables local proxies (e.g. localhost:3456)
- Add MODEL env var to cli.py config dict — was missing, so explicit model selection via env var didn't reach LLMManager
- Skip model fallback chain when MODEL is explicitly set (not "auto") — prevents hitting rate-limited Claude models when using alternative models like gpt-5.3-codex through an Anthropic-compatible proxy
- Pass anthropic_base_url through config dict from cli.py

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent ba1d13c commit fcdb725

File tree

3 files changed

+19
-3
lines changed

3 files changed

+19
-3
lines changed

scripts/hybrid/cli.py

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -209,6 +209,8 @@ def main():
209209
"anthropic_api_key": os.getenv("ANTHROPIC_API_KEY"),
210210
"openai_api_key": os.getenv("OPENAI_API_KEY"),
211211
"ollama_endpoint": os.getenv("OLLAMA_ENDPOINT"),
212+
"model": os.getenv("MODEL", "auto"),
213+
"anthropic_base_url": os.getenv("ANTHROPIC_BASE_URL"),
212214
}
213215

214216
# Resolve feature flags: CLI arg > env var > config_loader default

scripts/orchestrator/llm_manager.py

Lines changed: 10 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,6 +18,7 @@
1818
"""
1919

2020
import logging
21+
import os
2122
import shutil
2223

2324
from error_classifier import (
@@ -221,8 +222,12 @@ def initialize(self, provider: str = None) -> bool:
221222
self.model = self.get_model_name(provider)
222223

223224
# For Anthropic, test model accessibility and fallback if needed
224-
if provider == "anthropic":
225+
# Skip fallback chain if model is explicitly set (not "auto")
226+
explicit_model = self.config.get("model", "auto")
227+
if provider == "anthropic" and explicit_model == "auto":
225228
self.model = self._get_working_model_with_fallback(self.client, self.model)
229+
elif provider == "anthropic":
230+
logger.info(f"Using explicitly configured model: {self.model} (skipping fallback chain)")
226231

227232
logger.info(f"Successfully initialized LLM Manager with {self.provider} / {self.model}")
228233
return True
@@ -251,6 +256,10 @@ def _get_client(self, provider: str):
251256
if not api_key:
252257
raise ValueError("ANTHROPIC_API_KEY not set")
253258

259+
base_url = self.config.get("anthropic_base_url") or os.environ.get("ANTHROPIC_BASE_URL")
260+
if base_url:
261+
logger.info(f"Using Anthropic API via proxy: {base_url}")
262+
return Anthropic(api_key=api_key, base_url=base_url), "anthropic"
254263
logger.info("Using Anthropic API")
255264
return Anthropic(api_key=api_key), "anthropic"
256265
except ImportError:

scripts/providers/anthropic_provider.py

Lines changed: 7 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -46,8 +46,13 @@ def _init_client(self):
4646
try:
4747
import anthropic
4848

49-
self.client = anthropic.Anthropic(api_key=self.api_key)
50-
logger.info(f"✅ Anthropic provider initialized with model {self.model}")
49+
base_url = os.environ.get("ANTHROPIC_BASE_URL")
50+
if base_url:
51+
self.client = anthropic.Anthropic(api_key=self.api_key, base_url=base_url)
52+
logger.info(f"✅ Anthropic provider initialized via proxy with model {self.model}")
53+
else:
54+
self.client = anthropic.Anthropic(api_key=self.api_key)
55+
logger.info(f"✅ Anthropic provider initialized with model {self.model}")
5156
except ImportError:
5257
raise ImportError("anthropic package not installed. Run: pip install anthropic")
5358

0 commit comments

Comments (0)