Skip to content

Commit 8fcc3e1

Browse files
07-C9SomethingNew71
authored and committed
refactor(parser): address review feedback on Link parser
Locate the ld2 metadata block via the blocks vector rather than using hardcoded absolute file offsets (survives any future change to lf3 size). Pre-allocate the all_times vector with the known total sample count to skip reallocations during push. Truncate the f64 times vector at the same 50_001-sample ceiling the inner loop enforces on data_matrix, so large logs don't pay the cost of a timeline allocation that gets discarded. Drop the now-redundant times[..data_matrix.len()].to_vec() slice at the end. No behavior change on any file in exampleLogs/link/ or on the three .llgx files from a real G4X install; same sample counts, same channel min/max values. All tests still pass.
1 parent c9dc1bb commit 8fcc3e1

1 file changed

Lines changed: 22 additions & 13 deletions

File tree

src/parsers/link.rs

Lines changed: 22 additions & 13 deletions
Original file line number | Diff line number | Diff line change
@@ -223,15 +223,18 @@ impl Link {
223223
return Err("Missing or malformed lf3 header block".into());
224224
}
225225

226-
// Metadata. Keys inside ld2 are at known offsets relative to file start
227-
// (ld2 starts right after lf3 at offset 215 for all observed files).
226+
// Metadata. Fields live inside the ld2 block's content at fixed offsets
227+
// relative to the block's content start. Locating ld2 via the blocks
228+
// vector instead of hardcoded file offsets keeps this robust against
229+
// future changes to lf3 size or the ordering of preceding blocks.
228230
let mut meta = LinkMeta::default();
229-
if data.len() > 0x1A00 {
230-
meta.ecu_model = Self::read_utf16_string(data, 0x336, 32);
231-
meta.log_date = Self::read_utf16_string(data, 0x1786, 16);
232-
meta.log_time = Self::read_utf16_string(data, 0x184e, 16);
233-
meta.software_version = Self::read_utf16_string(data, 0x1916, 20);
234-
meta.source = Self::read_utf16_string(data, 0x1aa6, 20);
231+
if let Some(ld2_block) = blocks.iter().find(|(_, _, marker, _)| marker == b"ld2") {
232+
let content_start = ld2_block.0 + 8;
233+
meta.ecu_model = Self::read_utf16_string(data, content_start + 0x257, 32);
234+
meta.log_date = Self::read_utf16_string(data, content_start + 0x16A7, 16);
235+
meta.log_time = Self::read_utf16_string(data, content_start + 0x176F, 16);
236+
meta.software_version = Self::read_utf16_string(data, content_start + 0x1837, 20);
237+
meta.source = Self::read_utf16_string(data, content_start + 0x19C7, 20);
235238
}
236239

237240
// Collect ds3 blocks and their data-region boundaries.
@@ -287,7 +290,8 @@ impl Link {
287290
}
288291

289292
// Build the common timeline: all distinct timestamps, sorted.
290-
let mut all_times: Vec<f32> = Vec::new();
293+
let total_samples: usize = channel_samples.iter().map(|v| v.len()).sum();
294+
let mut all_times: Vec<f32> = Vec::with_capacity(total_samples);
291295
for points in &channel_samples {
292296
for &(t, _) in points {
293297
all_times.push(t);
@@ -305,9 +309,15 @@ impl Link {
305309
});
306310
}
307311

308-
// Normalize to a f64 seconds axis starting at 0.
312+
// Normalize to a f64 seconds axis starting at 0. Cap at the same
313+
// 50_001 ceiling the inner loop enforces on data_matrix so we don't
314+
// allocate a huge times vector that will be truncated anyway.
309315
let first_time = *all_times.first().unwrap();
310-
let times: Vec<f64> = all_times.iter().map(|t| (*t - first_time) as f64).collect();
316+
let times: Vec<f64> = all_times
317+
.iter()
318+
.take(50_001)
319+
.map(|t| (*t - first_time) as f64)
320+
.collect();
311321

312322
// Build the data matrix with last-observation-carried-forward semantics.
313323
let row_cap = channels.len();
@@ -350,11 +360,10 @@ impl Link {
350360
meta.ecu_model
351361
);
352362

353-
let times_out = times[..data_matrix.len()].to_vec();
354363
Ok(Log {
355364
meta: Meta::Link(meta),
356365
channels: channels.into_iter().map(Channel::Link).collect(),
357-
times: times_out,
366+
times,
358367
data: data_matrix,
359368
})
360369
}

0 commit comments

Comments
 (0)