Skip to content

Commit 92c51ac

Browse files
committed
fix(clippy): satisfy Rust 1.95 unnecessary_sort_by + explicit_counter_loop
Rust 1.95 promoted two clippy lints to deny-by-default in CI's -D warnings build. Replace sort_by with sort_by_key(|b| Reverse(...)) and derive vector IDs from the enumerate() index instead of a manual counter variable.
1 parent 84dff52 commit 92c51ac

5 files changed

Lines changed: 15 additions & 20 deletions

File tree

src/identity.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -53,7 +53,7 @@ pub fn extract_l1_facts(store: &Store, profile: &VaultProfile) -> Result<L1Summa
5353
Some((*f, incoming.len()))
5454
})
5555
.collect();
56-
scored.sort_by(|a, b| b.1.cmp(&a.1));
56+
scored.sort_by_key(|b| std::cmp::Reverse(b.1));
5757

5858
for (file, _count) in scored.into_iter().take(5) {
5959
let name = file_stem(&file.path);
@@ -71,7 +71,7 @@ pub fn extract_l1_facts(store: &Store, profile: &VaultProfile) -> Result<L1Summa
7171
.collect();
7272

7373
// Sort by note_date descending (most recent first).
74-
daily_files.sort_by(|a, b| b.note_date.cmp(&a.note_date));
74+
daily_files.sort_by_key(|b| std::cmp::Reverse(b.note_date));
7575

7676
// ── Current focus (most recent daily note) ──────────────
7777
if let Some(latest) = daily_files.first()

src/indexer.rs

Lines changed: 2 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -358,15 +358,14 @@ pub fn index_file(
358358
note_date,
359359
)?;
360360

361-
let mut next_vector_id: u64 = store.next_vector_id()?;
361+
let start_vector_id: u64 = store.next_vector_id()?;
362362
let total_chunks = chunks.len();
363363

364364
for (chunk_seq, chunk) in chunks.iter().enumerate() {
365365
let heading = chunk.heading.clone().unwrap_or_default();
366366
let snippet = &chunk.snippet;
367367
let vector = &all_vectors[chunk_seq];
368-
let vector_id = next_vector_id;
369-
next_vector_id += 1;
368+
let vector_id = start_vector_id + chunk_seq as u64;
370369

371370
store.insert_chunk_with_vector(
372371
file_id,

src/links.rs

Lines changed: 2 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -95,7 +95,7 @@ pub(crate) fn build_name_index(store: &Store, vault_path: &Path) -> Result<Vec<N
9595
}
9696

9797
// Sort by name length descending — match longer names first
98-
entries.sort_by(|a, b| b.name.len().cmp(&a.name.len()));
98+
entries.sort_by_key(|b| std::cmp::Reverse(b.name.len()));
9999
Ok(entries)
100100
}
101101

@@ -701,7 +701,7 @@ pub fn apply_links(content: &str, links: &[DiscoveredLink]) -> String {
701701
}
702702

703703
// Sort by position descending so we can replace from end to start
704-
replacements.sort_by(|a, b| b.0.cmp(&a.0));
704+
replacements.sort_by_key(|b| std::cmp::Reverse(b.0));
705705

706706
let mut result = content.to_string();
707707
for (start, end, replacement) in replacements {

src/llm.rs

Lines changed: 3 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -1039,9 +1039,9 @@ impl LlamaOrchestrator {
10391039
// Each token may produce multi-byte UTF-8 sequences; use an encoding_rs decoder
10401040
// to correctly reassemble them across token boundaries.
10411041
let mut decoder = encoding_rs::UTF_8.new_decoder();
1042-
let mut n_cur = tokens.len();
1042+
let prompt_len = tokens.len();
10431043

1044-
for _ in 0..max_tokens {
1044+
for step in 0..max_tokens {
10451045
let new_token = sampler.sample(&ctx, batch.n_tokens() - 1);
10461046
sampler.accept(new_token);
10471047

@@ -1060,9 +1060,8 @@ impl LlamaOrchestrator {
10601060
// Add token to batch for next iteration.
10611061
batch.clear();
10621062
batch
1063-
.add(new_token, n_cur as i32, &[0], true)
1063+
.add(new_token, (prompt_len + step) as i32, &[0], true)
10641064
.map_err(|e| anyhow::anyhow!("adding generated token to batch: {e}"))?;
1065-
n_cur += 1;
10661065

10671066
ctx.decode(&mut batch)
10681067
.map_err(|e| anyhow::anyhow!("generation decode failed: {e}"))?;

src/writer.rs

Lines changed: 6 additions & 9 deletions
Original file line number | Diff line number | Diff line change
@@ -637,10 +637,9 @@ pub fn create_note(
637637
None,
638638
)?;
639639

640-
let mut next_vid = store.next_vector_id()?;
640+
let start_vid = store.next_vector_id()?;
641641
for (chunk_seq, (heading, snippet, vector, token_count)) in chunk_data.iter().enumerate() {
642-
let vid = next_vid;
643-
next_vid += 1;
642+
let vid = start_vid + chunk_seq as u64;
644643
store.insert_chunk_with_vector(file_id, heading, snippet, vid, *token_count, vector)?;
645644
store.insert_vec(vid, vector)?;
646645
store.insert_fts_chunk(file_id, chunk_seq as i64, snippet)?;
@@ -785,10 +784,9 @@ pub fn append_to_note(
785784
None,
786785
)?;
787786

788-
let mut next_vid = store.next_vector_id()?;
787+
let start_vid = store.next_vector_id()?;
789788
for (chunk_seq, (heading, snippet, vector, token_count)) in chunk_data.iter().enumerate() {
790-
let vid = next_vid;
791-
next_vid += 1;
789+
let vid = start_vid + chunk_seq as u64;
792790
store.insert_chunk_with_vector(file_id, heading, snippet, vid, *token_count, vector)?;
793791
store.insert_vec(vid, vector)?;
794792
store.insert_fts_chunk(file_id, chunk_seq as i64, snippet)?;
@@ -1561,10 +1559,9 @@ pub fn unarchive_note(
15611559
None,
15621560
)?;
15631561

1564-
let mut next_vid = store.next_vector_id()?;
1562+
let start_vid = store.next_vector_id()?;
15651563
for (seq, (heading, snippet, vector, token_count)) in chunk_data.iter().enumerate() {
1566-
let vid = next_vid;
1567-
next_vid += 1;
1564+
let vid = start_vid + seq as u64;
15681565
store.insert_chunk_with_vector(file_id, heading, snippet, vid, *token_count, vector)?;
15691566
store.insert_vec(vid, vector)?;
15701567
store.insert_fts_chunk(file_id, seq as i64, snippet)?;

0 commit comments

Comments (0)