Skip to content

Commit 73f0706

Browse files
haasonsaas and claude committed
Refactor: deduplicate review orchestration, add concurrent LLM calls, fix all clippy warnings
- Extract shared review helpers into state.rs (ReviewEventBuilder, progress callbacks, mark_running/complete_review/fail_review, ~200 lines removed from api.rs/github.rs) - Add concurrent per-file LLM calls via futures::buffer_unordered(5) in pipeline.rs - Add CliOverrides struct to replace 16-param apply_cli_overrides - Add WebhookReviewParams struct to fix too_many_arguments in github.rs - Add Display + as_str() for Severity/Category enums, replace Debug formatting - Add ReviewListItem for lightweight list endpoint - Add review_semaphore (tokio::Semaphore) for rate limiting concurrent reviews - Use url::Url::parse for Ollama port detection instead of string contains - Use HashSet for O(1) symbol deduplication in extract_symbols_from_diff - Return JoinHandle from save_reviews_async/save_config_async - Simplify boolean expression (is_none_or) and fix all map_or → is_some_and - Fix all dead_code warnings (model_name, resolve_provider, as_str, title) - 579 tests passing, 0 clippy warnings Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
1 parent b5de3b5 commit 73f0706

File tree

18 files changed

+1312
-618
lines changed

18 files changed

+1312
-618
lines changed

Cargo.lock

Lines changed: 51 additions & 0 deletions
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

Cargo.toml

Lines changed: 1 addition & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -40,6 +40,7 @@ hmac = "0.12"
4040
sha2 = "0.10"
4141
jsonwebtoken = "9"
4242
base64 = "0.22"
43+
futures = "0.3"
4344

4445
[dev-dependencies]
4546
tempfile = "3.8"

src/adapters/anthropic.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -143,7 +143,7 @@ impl LLMAdapter for AnthropicAdapter {
143143
})
144144
}
145145

146-
fn _model_name(&self) -> &str {
146+
fn model_name(&self) -> &str {
147147
&self.config.model_name
148148
}
149149
}
@@ -371,10 +371,10 @@ mod tests {
371371
}
372372

373373
#[test]
374-
fn test_model_name() {
374+
fn testmodel_name() {
375375
let config = test_config("http://localhost:8080");
376376
let adapter = AnthropicAdapter::new(config).unwrap();
377-
assert_eq!(adapter._model_name(), "claude-3-5-sonnet-20241022");
377+
assert_eq!(adapter.model_name(), "claude-3-5-sonnet-20241022");
378378
}
379379

380380
#[tokio::test]

src/adapters/llm.rs

Lines changed: 63 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -65,7 +65,17 @@ pub struct Usage {
6565
#[async_trait]
6666
pub trait LLMAdapter: Send + Sync {
6767
async fn complete(&self, request: LLMRequest) -> Result<LLMResponse>;
68-
fn _model_name(&self) -> &str;
68+
#[allow(dead_code)]
69+
fn model_name(&self) -> &str;
70+
}
71+
72+
/// Check if a base URL points to an Ollama instance by parsing the port.
73+
fn is_ollama_url(base_url: &Option<String>) -> bool {
74+
base_url.as_ref().is_some_and(|u| {
75+
url::Url::parse(u)
76+
.map(|parsed| parsed.port() == Some(11434))
77+
.unwrap_or(false)
78+
})
6979
}
7080

7181
pub fn create_adapter(config: &ModelConfig) -> Result<Box<dyn LLMAdapter>> {
@@ -118,12 +128,7 @@ pub fn create_adapter(config: &ModelConfig) -> Result<Box<dyn LLMAdapter>> {
118128
name if name.starts_with("ollama:") => {
119129
Ok(Box::new(crate::adapters::OllamaAdapter::new(config)?))
120130
}
121-
_name
122-
if config
123-
.base_url
124-
.as_ref()
125-
.is_some_and(|u| u.contains("11434")) =>
126-
{
131+
_name if is_ollama_url(&config.base_url) => {
127132
Ok(Box::new(crate::adapters::OllamaAdapter::new(config)?))
128133
}
129134
// Default to OpenAI for unknown models
@@ -149,42 +154,42 @@ mod tests {
149154
fn test_create_adapter_claude_dash_prefix() {
150155
let config = local_config("claude-3-5-sonnet-20241022");
151156
let adapter = create_adapter(&config).unwrap();
152-
assert_eq!(adapter._model_name(), "claude-3-5-sonnet-20241022");
157+
assert_eq!(adapter.model_name(), "claude-3-5-sonnet-20241022");
153158
}
154159

155160
#[test]
156161
fn test_create_adapter_claude_legacy_prefix() {
157162
let config = local_config("claude3sonnet");
158163
let adapter = create_adapter(&config).unwrap();
159-
assert_eq!(adapter._model_name(), "claude3sonnet");
164+
assert_eq!(adapter.model_name(), "claude3sonnet");
160165
}
161166

162167
#[test]
163168
fn test_create_adapter_gpt() {
164169
let config = local_config("gpt-4o");
165170
let adapter = create_adapter(&config).unwrap();
166-
assert_eq!(adapter._model_name(), "gpt-4o");
171+
assert_eq!(adapter.model_name(), "gpt-4o");
167172
}
168173

169174
#[test]
170175
fn test_create_adapter_gpt35() {
171176
let config = local_config("gpt-3.5-turbo");
172177
let adapter = create_adapter(&config).unwrap();
173-
assert_eq!(adapter._model_name(), "gpt-3.5-turbo");
178+
assert_eq!(adapter.model_name(), "gpt-3.5-turbo");
174179
}
175180

176181
#[test]
177182
fn test_create_adapter_o1() {
178183
let config = local_config("o1-preview");
179184
let adapter = create_adapter(&config).unwrap();
180-
assert_eq!(adapter._model_name(), "o1-preview");
185+
assert_eq!(adapter.model_name(), "o1-preview");
181186
}
182187

183188
#[test]
184189
fn test_create_adapter_ollama_prefix() {
185190
let config = local_config("ollama:codellama");
186191
let adapter = create_adapter(&config).unwrap();
187-
assert_eq!(adapter._model_name(), "ollama:codellama");
192+
assert_eq!(adapter.model_name(), "ollama:codellama");
188193
}
189194

190195
#[test]
@@ -196,15 +201,15 @@ mod tests {
196201
..Default::default()
197202
};
198203
let adapter = create_adapter(&config).unwrap();
199-
assert_eq!(adapter._model_name(), "codellama");
204+
assert_eq!(adapter.model_name(), "codellama");
200205
}
201206

202207
#[test]
203208
fn test_create_adapter_default_unknown_model() {
204209
// Unknown model name with a local base_url should default to OpenAI adapter
205210
let config = local_config("some-custom-model");
206211
let adapter = create_adapter(&config).unwrap();
207-
assert_eq!(adapter._model_name(), "some-custom-model");
212+
assert_eq!(adapter.model_name(), "some-custom-model");
208213
}
209214

210215
#[test]
@@ -218,7 +223,7 @@ mod tests {
218223
};
219224
// Even though model_name says "gpt-4o", the override should pick Anthropic
220225
let adapter = create_adapter(&config).unwrap();
221-
assert_eq!(adapter._model_name(), "gpt-4o");
226+
assert_eq!(adapter.model_name(), "gpt-4o");
222227
}
223228

224229
#[test]
@@ -231,7 +236,7 @@ mod tests {
231236
..Default::default()
232237
};
233238
let adapter = create_adapter(&config).unwrap();
234-
assert_eq!(adapter._model_name(), "my-model");
239+
assert_eq!(adapter.model_name(), "my-model");
235240
}
236241

237242
#[test]
@@ -245,7 +250,7 @@ mod tests {
245250
};
246251
// Even though model_name says "claude-*", the override should pick OpenAI
247252
let adapter = create_adapter(&config).unwrap();
248-
assert_eq!(adapter._model_name(), "claude-3-5-sonnet-20241022");
253+
assert_eq!(adapter.model_name(), "claude-3-5-sonnet-20241022");
249254
}
250255

251256
#[test]
@@ -259,7 +264,46 @@ mod tests {
259264
};
260265
// Unknown adapter override should default to OpenAI
261266
let adapter = create_adapter(&config).unwrap();
262-
assert_eq!(adapter._model_name(), "my-model");
267+
assert_eq!(adapter.model_name(), "my-model");
268+
}
269+
270+
#[test]
271+
fn test_create_adapter_ollama_by_standard_url() {
272+
// Should detect Ollama from standard localhost:11434 URL
273+
let config = ModelConfig {
274+
model_name: "codellama".to_string(),
275+
api_key: None,
276+
base_url: Some("http://localhost:11434".to_string()),
277+
..Default::default()
278+
};
279+
let adapter = create_adapter(&config).unwrap();
280+
assert_eq!(adapter.model_name(), "codellama");
281+
}
282+
283+
#[test]
284+
fn test_create_adapter_ollama_by_url_with_path() {
285+
// Should detect Ollama even with trailing path
286+
let config = ModelConfig {
287+
model_name: "codellama".to_string(),
288+
api_key: None,
289+
base_url: Some("http://my-server:11434/api".to_string()),
290+
..Default::default()
291+
};
292+
let adapter = create_adapter(&config).unwrap();
293+
assert_eq!(adapter.model_name(), "codellama");
294+
}
295+
296+
#[test]
297+
fn test_create_adapter_port_in_path_not_detected_as_ollama() {
298+
// A URL like http://proxy.example.com/service/11434 should NOT trigger Ollama
299+
let config = ModelConfig {
300+
model_name: "my-model".to_string(),
301+
api_key: Some("test-key".to_string()),
302+
base_url: Some("http://proxy.example.com/service/11434".to_string()),
303+
..Default::default()
304+
};
305+
// Should default to OpenAI, not Ollama
306+
let _adapter = create_adapter(&config).unwrap();
263307
}
264308

265309
#[test]

src/adapters/ollama.rs

Lines changed: 7 additions & 7 deletions
Original file line number | Diff line number | Diff line change
@@ -225,7 +225,7 @@ impl LLMAdapter for OllamaAdapter {
225225
})
226226
}
227227

228-
fn _model_name(&self) -> &str {
228+
fn model_name(&self) -> &str {
229229
&self.config.model_name
230230
}
231231
}
@@ -458,17 +458,17 @@ mod tests {
458458
}
459459

460460
#[test]
461-
fn test_model_name_with_prefix() {
461+
fn testmodel_name_with_prefix() {
462462
let config = test_config("http://localhost:11434", "ollama:codellama");
463463
let adapter = OllamaAdapter::new(config).unwrap();
464-
assert_eq!(adapter._model_name(), "ollama:codellama");
464+
assert_eq!(adapter.model_name(), "ollama:codellama");
465465
}
466466

467467
#[test]
468-
fn test_model_name_without_prefix() {
468+
fn testmodel_name_without_prefix() {
469469
let config = test_config("http://localhost:11434", "codellama");
470470
let adapter = OllamaAdapter::new(config).unwrap();
471-
assert_eq!(adapter._model_name(), "codellama");
471+
assert_eq!(adapter.model_name(), "codellama");
472472
}
473473

474474
// ---- Chat message construction tests ----
@@ -758,14 +758,14 @@ mod tests {
758758
// ---- model_name_bare tests ----
759759

760760
#[test]
761-
fn test_model_name_bare_with_prefix() {
761+
fn testmodel_name_bare_with_prefix() {
762762
let config = test_config("http://localhost:11434", "ollama:deepseek-coder");
763763
let adapter = OllamaAdapter::new(config).unwrap();
764764
assert_eq!(adapter.model_name_bare(), "deepseek-coder");
765765
}
766766

767767
#[test]
768-
fn test_model_name_bare_without_prefix() {
768+
fn testmodel_name_bare_without_prefix() {
769769
let config = test_config("http://localhost:11434", "deepseek-coder");
770770
let adapter = OllamaAdapter::new(config).unwrap();
771771
assert_eq!(adapter.model_name_bare(), "deepseek-coder");

src/adapters/openai.rs

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -138,7 +138,7 @@ impl LLMAdapter for OpenAIAdapter {
138138
self.complete_chat_completions(request).await
139139
}
140140

141-
fn _model_name(&self) -> &str {
141+
fn model_name(&self) -> &str {
142142
&self.config.model_name
143143
}
144144
}
@@ -579,10 +579,10 @@ mod tests {
579579
}
580580

581581
#[test]
582-
fn test_model_name() {
582+
fn testmodel_name() {
583583
let config = test_config("http://localhost:8080");
584584
let adapter = OpenAIAdapter::new(config).unwrap();
585-
assert_eq!(adapter._model_name(), "gpt-4o");
585+
assert_eq!(adapter.model_name(), "gpt-4o");
586586
}
587587

588588
#[tokio::test]

src/commands/eval.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -779,13 +779,13 @@ impl EvalPattern {
779779
}
780780

781781
if let Some(severity) = &self.severity {
782-
if !format!("{:?}", comment.severity).eq_ignore_ascii_case(severity.trim()) {
782+
if !comment.severity.to_string().eq_ignore_ascii_case(severity.trim()) {
783783
return false;
784784
}
785785
}
786786

787787
if let Some(category) = &self.category {
788-
if !format!("{:?}", comment.category).eq_ignore_ascii_case(category.trim()) {
788+
if !comment.category.to_string().eq_ignore_ascii_case(category.trim()) {
789789
return false;
790790
}
791791
}

src/commands/misc.rs

Lines changed: 1 addition & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -500,7 +500,7 @@ async fn answer_discussion_question(
500500
let mut prompt = String::new();
501501
prompt.push_str("Review comment context:\n");
502502
prompt.push_str(&format!(
503-
"- id: {}\n- file: {}\n- line: {}\n- severity: {:?}\n- category: {:?}\n- confidence: {:.0}%\n- comment: {}\n",
503+
"- id: {}\n- file: {}\n- line: {}\n- severity: {}\n- category: {}\n- confidence: {:.0}%\n- comment: {}\n",
504504
comment.id,
505505
comment.file_path.display(),
506506
comment.line_number,

src/commands/review.rs

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -194,7 +194,7 @@ pub async fn review_command(
194194
if let Some(pc) = path_config {
195195
for comment in &mut comments {
196196
for (category, severity) in &pc.severity_overrides {
197-
if format!("{:?}", comment.category).to_lowercase()
197+
if comment.category.as_str()
198198
== category.to_lowercase()
199199
{
200200
comment.severity = match severity.to_lowercase().as_str() {

0 commit comments

Comments
 (0)