1use rayon::prelude::*;
2use std::sync::Arc;
3use mscore::timstof::frame::TimsFrame;
4use serde::{Deserialize, Serialize};
5use crate::cluster::utility::{fallback_rt_peak_from_trace, find_im_peaks_row, quad_subsample, robust_noise_mad, rt_peak_id, smooth_vector_gaussian, trapezoid_area_fractional, MobilityFn, TofScale};
6
/// How a detection threshold is derived from a noise estimate.
///
/// The combination rules live in `ThresholdMode::effective`.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub enum ThresholdMode {
    /// Absolute threshold value. NOTE: when a positive noise estimate is
    /// available, `effective` still raises it to at least `3 * noise`.
    Fixed(f32),
    /// Threshold expressed as a multiple (sigma) of the estimated noise.
    AdaptiveNoise(f32),
}
24
25impl Default for ThresholdMode {
26 fn default() -> Self {
27 Self::AdaptiveNoise(3.0)
29 }
30}
31
32impl ThresholdMode {
33 #[inline]
35 pub fn fixed(value: f32) -> Self {
36 Self::Fixed(value)
37 }
38
39 #[inline]
41 pub fn adaptive(sigma_multiplier: f32) -> Self {
42 Self::AdaptiveNoise(sigma_multiplier)
43 }
44
45 #[inline]
47 pub fn effective(&self, noise: f32) -> f32 {
48 match self {
49 Self::Fixed(val) => {
50 if noise > 0.0 { val.max(3.0 * noise) } else { *val }
52 }
53 Self::AdaptiveNoise(sigma) => {
54 if noise > 0.0 { sigma * noise } else { 1.0 }
56 }
57 }
58 }
59}
60
61pub type PeakId = i64;
63
/// A 1-D peak detected along the ion-mobility (scan) axis for one TOF row.
#[derive(Clone, Debug)]
pub struct ImPeak1D {
    /// Row index in the TOF/scan grid the peak was detected on.
    pub tof_row: usize,
    /// Rounded TOF bin center for that row.
    pub tof_center: i32,
    /// Rounded TOF bin edges (left, right) for that row.
    pub tof_bounds: (i32, i32),
    /// RT extent (frame indices) of the grid the peak came from.
    pub rt_bounds: (usize, usize),
    /// Frame-id extent of the grid the peak came from.
    pub frame_id_bounds: (u32, u32),
    /// DIA window group, when applicable.
    pub window_group: Option<u32>,
    /// Apex scan position.
    pub scan: usize,
    /// Left scan bound of the peak.
    pub left: usize,
    /// Right scan bound of the peak.
    pub right: usize,

    // NOTE(review): the `*_abs` fields appear to be absolute-scan
    // counterparts of `scan`/`left`/`right` — confirm with the producer.
    // `left_abs`/`right_abs` are the scan summation range used when
    // expanding this peak along RT (see `expand_im_peak_along_rt`).
    pub scan_abs: usize,
    pub left_abs: usize,
    pub right_abs: usize,

    /// Estimated peak sigma in scans, if computed.
    pub scan_sigma: Option<f32>,
    /// Ion mobility at the apex, if a mobility function was supplied.
    pub mobility: Option<f32>,
    /// Apex intensity of the smoothed scan trace.
    pub apex_smoothed: f32,
    /// Apex intensity of the raw scan trace.
    pub apex_raw: f32,
    /// Peak prominence.
    pub prominence: f32,
    /// Fractional left bound (scan-index units).
    pub left_x: f32,
    /// Fractional right bound (scan-index units).
    pub right_x: f32,
    /// Width in whole scans.
    pub width_scans: usize,
    /// Area of the raw trace over the peak bounds.
    pub area_raw: f32,
    /// Sub-scan apex refinement offset.
    pub subscan: f32,
    /// This peak's identifier.
    pub id: PeakId,
}
93
/// CSR-style view of one frame's points, grouped by TOF bin.
///
/// `unique_bins` is sorted ascending; the points of `unique_bins[j]` live at
/// `offsets[j]..offsets[j + 1]` in `scan_idx` / `intensity`
/// (`offsets.len() == unique_bins.len() + 1`). Built by
/// `build_frame_bin_view`; point order within one bin is unspecified.
#[derive(Clone, Debug)]
pub struct FrameBinView {
    /// Source frame id (kept for debugging; not read elsewhere in this file).
    pub _frame_id: u32,
    /// Sorted, deduplicated TOF bin indices present in this frame.
    pub unique_bins: Vec<usize>,
    /// Prefix offsets into `scan_idx` / `intensity`, one extra trailing entry.
    pub offsets: Vec<usize>,
    /// Scan index of each point, grouped by bin.
    pub scan_idx: Vec<u32>,
    /// Intensity of each point, grouped by bin.
    pub intensity: Vec<f32>,
}
102
/// Dense TOF x scan intensity grid, row-major: `data[row * cols + col]`.
#[derive(Clone, Debug)]
pub struct TofScanGrid {
    /// Scan index per column (length `cols`).
    pub scans: Vec<usize>,
    /// Row-major intensity values, `rows * cols` entries.
    pub data: Vec<f32>,
    /// Number of TOF bins (rows).
    pub rows: usize,
    /// Number of scans (columns).
    pub cols: usize,
    /// Optional second buffer used in preference to `data` for IM peak
    /// detection (presumably unsmoothed/raw intensities — confirm with
    /// producers).
    pub data_raw: Option<Vec<f32>>,
    /// TOF binning for the row axis.
    pub scale: TofScale,
}
112
113impl TofScanGrid {
114 #[inline]
115 pub fn tof_center_for_row(&self, r: usize) -> f32 {
116 self.scale.center(r)
119 }
120 #[inline]
121 pub fn tof_bounds_for_row(&self, r: usize) -> (f32, f32) {
122 (self.scale.edges[r], self.scale.edges[r + 1])
123 }
124}
125
/// TOF x scan grid restricted to one RT window (and optionally one DIA
/// window group). `data` may be absent until the grid is materialized;
/// IM detection panics on an unmaterialized grid.
#[derive(Clone, Debug)]
pub struct TofScanWindowGrid {
    /// RT extent as frame indices.
    pub rt_range_frames: (usize, usize),
    /// RT extent in seconds.
    pub rt_range_sec: (f32, f32),
    /// First/last frame ids covered.
    pub frame_id_bounds: (u32, u32),
    /// DIA window group, when applicable.
    pub window_group: Option<u32>,

    /// Shared TOF binning for the row axis.
    pub scale: Arc<TofScale>,
    /// Scan index per column (length `cols`).
    pub scans: Vec<usize>,
    /// Row-major intensity values (`rows * cols`), if materialized.
    pub data: Option<Vec<f32>>,
    /// Number of TOF bins (rows).
    pub rows: usize,
    /// Number of scans (columns).
    pub cols: usize,
    /// Optional raw (unsmoothed?) buffer preferred for detection — confirm.
    pub data_raw: Option<Vec<f32>>,
}
141
142impl TofScanWindowGrid {
143 #[inline]
144 pub fn tof_center_for_row(&self, r: usize) -> f32 {
145 self.scale.center(r)
146 }
147 #[inline]
148 pub fn tof_bounds_for_row(&self, r: usize) -> (f32, f32) {
149 (self.scale.edges[r], self.scale.edges[r + 1])
150 }
151}
152
153pub fn build_frame_bin_view(
154 fr: TimsFrame,
155 scale: &TofScale, global_num_scans: usize,
157) -> FrameBinView {
158 let n = fr.tof.len();
159 let mut bins_idx: Vec<usize> = Vec::with_capacity(n);
160 let mut scans_u: Vec<u32> = Vec::with_capacity(n);
161 let mut intens: Vec<f32> = Vec::with_capacity(n);
162
163 let scans_vec: &Vec<i32> = &fr.scan;
164 let tofs: &Vec<i32> = &fr.tof;
165
166 for i in 0..n {
167 let tof = tofs[i];
168 if let Some(idx) = scale.index_of_tof(tof) {
169 bins_idx.push(idx);
170
171 let s_val = scans_vec[i];
172 debug_assert!(s_val >= 0, "Negative scan index in frame {}", fr.frame_id);
173 let s_u32: u32 = u32::try_from(s_val).expect("scan index does not fit u32");
174 scans_u.push((s_u32 as usize).min(global_num_scans.saturating_sub(1)) as u32);
175
176 intens.push(fr.ims_frame.intensity[i] as f32);
177 }
178 }
179
180 let mut idx: Vec<usize> = (0..bins_idx.len()).collect();
182 idx.sort_unstable_by_key(|&i| bins_idx[i]);
183
184 let mut unique_bins: Vec<usize> = Vec::new();
185 let mut counts: Vec<usize> = Vec::new();
186 let mut scan_sorted: Vec<u32> = Vec::with_capacity(idx.len());
187 let mut inten_sorted: Vec<f32> = Vec::with_capacity(idx.len());
188
189 let mut cur: Option<usize> = None;
190 for &i in &idx {
191 let b = bins_idx[i];
192 if cur.map_or(true, |c| c != b) {
193 unique_bins.push(b);
194 counts.push(0);
195 cur = Some(b);
196 }
197 *counts.last_mut().unwrap() += 1;
198 scan_sorted.push(scans_u[i]);
199 inten_sorted.push(intens[i]);
200 }
201
202 let mut offsets = Vec::with_capacity(unique_bins.len() + 1);
203 offsets.push(0);
204 for c in &counts {
205 offsets.push(offsets.last().unwrap() + *c);
206 }
207
208 FrameBinView {
209 _frame_id: fr.frame_id as u32,
210 unique_bins,
211 offsets,
212 scan_idx: scan_sorted,
213 intensity: inten_sorted,
214 }
215}
216
217#[inline]
218fn sum_frame_bins_scans(
219 fbv: &FrameBinView,
220 bin_lo: usize,
221 bin_hi: usize,
222 scan_lo: usize,
223 scan_hi: usize,
224) -> f32 {
225 let ub = &fbv.unique_bins;
226 if ub.is_empty() || bin_lo > bin_hi { return 0.0; }
227
228 let start = match ub.binary_search(&bin_lo) {
229 Ok(i) => i,
230 Err(i) => i.min(ub.len()),
231 };
232 let mut acc = 0.0f32;
233
234 let end_bin = bin_hi;
235 let mut i = start;
236 while i < ub.len() {
237 let b = ub[i];
238 if b > end_bin { break; }
239
240 let lo = fbv.offsets[i];
241 let hi = fbv.offsets[i + 1];
242 let scans = &fbv.scan_idx[lo..hi];
243 let ints = &fbv.intensity[lo..hi];
244
245 for (s, val) in scans.iter().zip(ints.iter()) {
246 let s = *s as usize;
247 if s >= scan_lo && s <= scan_hi {
248 acc += *val;
249 }
250 }
251 i += 1;
252 }
253 acc
254}
255
256pub fn rt_trace_for_im_peak(
258 frames: &[FrameBinView], tof_row: usize,
260 bin_pad: usize, scan_lo: usize,
262 scan_hi: usize,
263) -> Vec<f32> {
264 let bin_lo = tof_row.saturating_sub(bin_pad);
265 let bin_hi = tof_row.saturating_add(bin_pad);
266 let mut v = Vec::with_capacity(frames.len());
267 for fbv in frames {
268 v.push(sum_frame_bins_scans(fbv, bin_lo, bin_hi, scan_lo, scan_hi));
269 }
270 v
271}
272
/// A run of frames with their parallel frame-id and retention-time axes,
/// plus the shared TOF binning used to build each `FrameBinView`.
#[derive(Clone, Debug)]
pub struct RtFrames {
    /// One bin view per frame, in the same order as `frame_ids`.
    pub frames: Vec<FrameBinView>,
    /// Frame id per entry (used via binary search, so expected ascending).
    pub frame_ids: Vec<u32>,
    /// Retention time in seconds per entry.
    pub rt_times: Vec<f32>,
    /// Shared TOF binning.
    pub scale: Arc<TofScale>,
}
280
281impl RtFrames {
282 #[inline]
283 pub fn ctx(&self) -> RtTraceCtx<'_> {
284 RtTraceCtx {
285 frame_ids_sorted: &self.frame_ids,
286 rt_times_sec: &self.rt_times,
287 }
288 }
289 #[inline]
290 pub fn is_consistent(&self) -> bool {
291 self.frames.len() == self.frame_ids.len()
292 && self.frame_ids.len() == self.rt_times.len()
293 }
294}
295
/// Borrowed RT axis: frame ids (ascending — consumed via binary search) and
/// their retention times in seconds, as equal-length parallel slices.
#[derive(Clone, Copy, Debug)]
pub struct RtTraceCtx<'a> {
    /// Sorted frame ids.
    pub frame_ids_sorted: &'a [u32],
    /// Retention time in seconds for each frame id.
    pub rt_times_sec: &'a [f32],
}
301
/// A 1-D peak along the retention-time (frame) axis, carrying the TOF
/// coordinates of the IM peak it was expanded from.
#[derive(Clone, Debug)]
pub struct RtPeak1D {
    /// Apex frame index on the global RT axis.
    pub rt_idx: usize,
    /// Apex retention time in seconds, when known.
    pub rt_sec: Option<f32>,
    /// Apex intensity of the smoothed trace.
    pub apex_smoothed: f32,
    /// Apex intensity of the raw trace.
    pub apex_raw: f32,
    /// Peak prominence on the smoothed trace.
    pub prominence: f32,
    /// Fractional left bound (global frame-index units).
    pub left_x: f32,
    /// Fractional right bound (global frame-index units).
    pub right_x: f32,
    /// Width in whole frames.
    pub width_frames: usize,
    /// Trapezoidal area of the raw trace over the peak bounds.
    pub area_raw: f32,
    /// Sub-frame apex refinement offset in [-0.5, 0.5].
    pub subframe: f32,

    /// Integer frame-index bounds on the global axis.
    pub rt_bounds_frames: (usize, usize),
    /// Frame-id bounds, normalized so that lo <= hi.
    pub frame_id_bounds: (u32, u32),
    /// DIA window group inherited from the parent IM peak.
    pub window_group: Option<u32>,

    /// TOF row / center / bounds inherited from the parent IM peak.
    pub tof_row: usize,
    pub tof_center: i32,
    pub tof_bounds: (i32, i32),

    /// Id of the IM peak this RT peak was expanded from, if any.
    pub parent_im_id: Option<PeakId>,
    /// This peak's identifier (from `rt_peak_id`).
    pub id: PeakId,
}
330
/// RT peaks expanded from one IM peak, keyed by that peak's position and id
/// in the input batch.
#[derive(Clone, Debug)]
pub struct RtPeaksForIm {
    /// Index of the IM peak within the batch passed to the expander.
    pub im_index: usize,
    /// Id of that IM peak.
    pub im_id: PeakId,
    /// RT peaks found for it (possibly empty).
    pub peaks: Vec<RtPeak1D>,
}
337
/// An RT peak in trace-local coordinates (indices relative to the clamped
/// trace handed to `find_rt_peaks`), before translation to the global axis.
#[derive(Clone, Debug)]
pub struct RtLocalPeak {
    /// Apex index within the trace.
    pub rt_idx: usize,
    /// Apex retention time in seconds, when known.
    pub rt_sec: Option<f32>,
    /// Apex intensity of the smoothed trace.
    pub apex_smoothed: f32,
    /// Apex intensity of the raw trace.
    pub apex_raw: f32,
    /// Peak prominence on the smoothed trace.
    pub prominence: f32,
    /// Fractional left bound (trace-local index units).
    pub left_x: f32,
    /// Fractional right bound (trace-local index units).
    pub right_x: f32,
    /// Width in whole frames.
    pub width_frames: usize,
    /// Trapezoidal area of the raw trace over the peak bounds.
    pub area_raw: f32,
    /// Sub-frame apex refinement offset in [-0.5, 0.5].
    pub subframe: f32,
    /// Left bound in seconds, when known.
    pub left_sec: Option<f32>,
    /// Right bound in seconds, when known.
    pub right_sec: Option<f32>,
    /// Width in seconds, when known.
    pub width_sec: Option<f32>,
}
354
/// Tuning parameters for expanding IM peaks along the RT axis.
#[derive(Clone, Copy, Debug, Serialize, Deserialize)]
pub struct RtExpandParams {
    /// Extra TOF bins added on each side of the peak's TOF window when
    /// summing the RT trace.
    pub bin_pad: usize,
    /// Gaussian smoothing sigma in seconds (converted to frames using the
    /// median frame spacing).
    pub smooth_sigma_sec: f32,
    /// Kernel truncation factor forwarded to `smooth_vector_gaussian`.
    pub smooth_trunc_k: f32,
    /// Prominence threshold mode for RT peak picking.
    pub min_prom: ThresholdMode,
    /// Minimum separation in seconds between accepted peaks (NMS).
    pub min_sep_sec: f32,
    /// Minimum half-prominence width in seconds.
    pub min_width_sec: f32,
    /// Use the single-peak fallback when the clamped trace has fewer frames
    /// than this.
    pub fallback_if_frames_lt: usize,
    /// Fractional-width parameter forwarded to the fallback peak builder.
    pub fallback_frac_width: f32,
    /// Frames of padding added on each side of the IM peak's frame-id range.
    pub rt_pad_frames: usize,
}
372
impl Default for RtExpandParams {
    fn default() -> Self {
        Self {
            bin_pad: 2,                         // ±2 TOF bins around the peak window
            smooth_sigma_sec: 0.5,              // half-second Gaussian smoothing
            smooth_trunc_k: 3.0,                // truncate kernel at 3 sigma
            min_prom: ThresholdMode::default(), // adaptive, 3 sigma above noise
            min_sep_sec: 1.0,
            min_width_sec: 0.5,
            fallback_if_frames_lt: 3,           // <3 frames: single-peak fallback
            fallback_frac_width: 0.25,
            rt_pad_frames: 2,
        }
    }
}
388
389impl RtExpandParams {
390 pub fn with_fixed_prom(mut self, prom: f32) -> Self {
392 self.min_prom = ThresholdMode::Fixed(prom);
393 self
394 }
395
396 pub fn with_adaptive_prom(mut self, sigma_multiplier: f32) -> Self {
398 self.min_prom = ThresholdMode::AdaptiveNoise(sigma_multiplier);
399 self
400 }
401
402 pub fn legacy_defaults() -> Self {
404 Self {
405 min_prom: ThresholdMode::Fixed(100.0),
406 ..Default::default()
407 }
408 }
409}
410
411#[inline]
412pub fn rt_trace_for_im_peak_by_bounds(
413 frames: &[FrameBinView],
414 rt_scale: &TofScale, tof_bounds: (i32, i32), extra_bins_pad: usize, scan_lo: usize,
418 scan_hi: usize,
419) -> Vec<f32> {
420 let (mut bin_l, mut bin_r) = rt_scale.index_range_for_tof_window(tof_bounds.0, tof_bounds.1);
422 bin_l = bin_l.saturating_sub(extra_bins_pad);
423 bin_r = bin_r.saturating_add(extra_bins_pad);
424
425 let mut v = Vec::with_capacity(frames.len());
426 for fbv in frames {
427 v.push(sum_frame_bins_scans(fbv, bin_l, bin_r, scan_lo, scan_hi));
428 }
429 v
430}
431
432pub fn expand_im_peak_along_rt(
433 im_peak: &ImPeak1D,
434 frames_sorted: &[FrameBinView],
435 rt_ctx: RtTraceCtx<'_>,
436 tof_scale: &TofScale,
437 p: RtExpandParams,
438) -> Vec<RtPeak1D> {
439 let (fid_lo_abs, fid_hi_abs) = im_peak.frame_id_bounds;
441
442 let mut allow_lo = match rt_ctx.frame_ids_sorted.binary_search(&fid_lo_abs) {
443 Ok(i) => i,
444 Err(_) => return Vec::new(),
445 };
446 let mut allow_hi = match rt_ctx.frame_ids_sorted.binary_search(&fid_hi_abs) {
447 Ok(i) => i,
448 Err(_) => return Vec::new(),
449 };
450 if allow_lo > allow_hi {
451 std::mem::swap(&mut allow_lo, &mut allow_hi);
452 }
453
454 let n_frames = frames_sorted.len();
455 if n_frames == 0 {
456 return Vec::new();
457 }
458
459 let pad = p.rt_pad_frames.min(n_frames.saturating_sub(1));
461 allow_lo = allow_lo.saturating_sub(pad);
462 allow_hi = (allow_hi + pad).min(n_frames.saturating_sub(1));
463
464 if allow_lo >= n_frames || allow_hi >= n_frames {
465 return Vec::new();
466 }
467
468 let trace_raw_full = rt_trace_for_im_peak_by_bounds(
470 frames_sorted,
471 tof_scale,
472 im_peak.tof_bounds,
473 p.bin_pad,
474 im_peak.left_abs,
475 im_peak.right_abs,
476 );
477 if trace_raw_full.is_empty() {
478 return Vec::new();
479 }
480
481 let trace_raw = &trace_raw_full[allow_lo..=allow_hi];
483 let rt_times_clamped = &rt_ctx.rt_times_sec[allow_lo..=allow_hi];
484 let n_clamped = trace_raw.len();
485 if n_clamped == 0 || !trace_raw.iter().any(|x| *x > 0.0) {
486 return Vec::new();
487 }
488
489 if n_clamped < p.fallback_if_frames_lt {
491 if let Some(pk) =
492 fallback_rt_peak_from_trace(trace_raw, rt_ctx, im_peak, p.fallback_frac_width)
493 {
494 let l = allow_lo + pk.left_x.floor().clamp(0.0, (n_clamped - 1) as f32) as usize;
495 let r = allow_lo + pk.right_x.ceil().clamp(0.0, (n_clamped - 1) as f32) as usize;
496 let lo_fid = rt_ctx.frame_ids_sorted[l];
497 let hi_fid = rt_ctx.frame_ids_sorted[r];
498
499 return vec![RtPeak1D {
500 parent_im_id: Some(im_peak.id),
501 window_group: im_peak.window_group,
502
503 tof_row: im_peak.tof_row,
504 tof_center: im_peak.tof_center,
505 tof_bounds: im_peak.tof_bounds,
506
507 rt_idx: allow_lo + pk.rt_idx,
508 rt_sec: pk.rt_sec,
509 apex_smoothed: pk.apex_smoothed,
510 apex_raw: pk.apex_raw,
511 prominence: pk.prominence,
512 left_x: pk.left_x + allow_lo as f32,
513 right_x: pk.right_x + allow_lo as f32,
514 width_frames: pk.width_frames,
515 area_raw: pk.area_raw,
516 subframe: pk.subframe,
517
518 rt_bounds_frames: (l, r),
519 frame_id_bounds: (lo_fid.min(hi_fid), lo_fid.max(hi_fid)),
520 id: rt_peak_id(&pk),
521 }];
522 }
523 return Vec::new();
524 }
525
526 let dt = effective_dt(rt_times_clamped);
528 let sigma_frames = (p.smooth_sigma_sec / dt).max(0.75);
529 let mut trace_smooth = trace_raw.to_vec();
530 smooth_vector_gaussian(&mut trace_smooth[..], sigma_frames, p.smooth_trunc_k);
531
532 let base = find_rt_peaks(
536 &trace_smooth,
537 trace_raw,
538 rt_times_clamped,
539 p.min_prom,
540 p.min_sep_sec,
541 p.min_width_sec,
542 );
543
544 if base.is_empty() {
545 if let Some(pk) =
546 fallback_rt_peak_from_trace(trace_raw, rt_ctx, im_peak, p.fallback_frac_width)
547 {
548 let l = allow_lo + pk.left_x.floor().clamp(0.0, (n_clamped - 1) as f32) as usize;
549 let r = allow_lo + pk.right_x.ceil().clamp(0.0, (n_clamped - 1) as f32) as usize;
550 let lo_fid = rt_ctx.frame_ids_sorted[l];
551 let hi_fid = rt_ctx.frame_ids_sorted[r];
552
553 return vec![RtPeak1D {
554 parent_im_id: Some(im_peak.id),
555 window_group: im_peak.window_group,
556
557 tof_row: im_peak.tof_row,
558 tof_center: im_peak.tof_center,
559 tof_bounds: im_peak.tof_bounds,
560
561 rt_idx: allow_lo + pk.rt_idx,
562 rt_sec: pk.rt_sec,
563 apex_smoothed: pk.apex_smoothed,
564 apex_raw: pk.apex_raw,
565 prominence: pk.prominence,
566 left_x: pk.left_x + allow_lo as f32,
567 right_x: pk.right_x + allow_lo as f32,
568 width_frames: pk.width_frames,
569 area_raw: pk.area_raw,
570 subframe: pk.subframe,
571
572 rt_bounds_frames: (l, r),
573 frame_id_bounds: (lo_fid.min(hi_fid), lo_fid.max(hi_fid)),
574 id: rt_peak_id(&pk),
575 }];
576 }
577 return Vec::new();
578 }
579
580 let n_frames_total = frames_sorted.len();
582
583 base.into_iter()
584 .map(|r0| {
585 let local_left =
586 r0.left_x.floor().clamp(0.0, (n_clamped - 1) as f32) as usize;
587 let local_right =
588 r0.right_x.ceil().clamp(0.0, (n_clamped - 1) as f32) as usize;
589
590 let global_left = allow_lo + local_left;
591 let global_right = allow_hi
592 .min(allow_lo + local_right)
593 .min(n_frames_total - 1);
594
595 let lo_fid = rt_ctx.frame_ids_sorted[global_left];
596 let hi_fid = rt_ctx.frame_ids_sorted[global_right];
597
598 let mut r = RtPeak1D {
599 parent_im_id: Some(im_peak.id),
600 window_group: im_peak.window_group,
601
602 tof_row: im_peak.tof_row,
603 tof_center: im_peak.tof_center,
604 tof_bounds: im_peak.tof_bounds,
605
606 rt_idx: allow_lo + r0.rt_idx,
607 rt_sec: r0.rt_sec,
608 apex_smoothed: r0.apex_smoothed,
609 apex_raw: r0.apex_raw,
610 prominence: r0.prominence,
611 left_x: r0.left_x + allow_lo as f32,
612 right_x: r0.right_x + allow_lo as f32,
613 width_frames: r0.width_frames,
614 area_raw: r0.area_raw,
615 subframe: r0.subframe,
616
617 rt_bounds_frames: (global_left, global_right),
618 frame_id_bounds: (lo_fid.min(hi_fid), lo_fid.max(hi_fid)),
619 id: 0,
620 };
621 r.id = rt_peak_id(&r);
622 r
623 })
624 .collect()
625}
626
627pub fn expand_many_im_peaks_along_rt(
628 im_peaks: &[ImPeak1D],
629 frames_sorted: &[FrameBinView],
630 ctx: RtTraceCtx<'_>,
631 tof_scale: &TofScale, p: RtExpandParams,
633) -> Vec<Vec<RtPeak1D>> {
634 if im_peaks.is_empty() { return Vec::new(); }
635
636 #[cfg(debug_assertions)]
637 {
638 let first_g = im_peaks[0].window_group;
639 let same = im_peaks.iter().all(|x| x.window_group == first_g);
640 debug_assert!(same, "expand_many_im_peaks_along_rt: mixed window_group in batch");
641 }
642
643 im_peaks
644 .par_iter()
645 .map(|im| expand_im_peak_along_rt(im, frames_sorted, ctx, tof_scale, p))
646 .collect()
647}
648
649pub fn expand_many_im_peaks_along_rt_flat(
650 im_peaks: &[ImPeak1D],
651 frames_sorted: &[FrameBinView],
652 ctx: RtTraceCtx<'_>,
653 tof_scale: &TofScale, p: RtExpandParams,
655) -> Vec<RtPeaksForIm> {
656 if im_peaks.is_empty() { return Vec::new(); }
657
658 (0..im_peaks.len())
659 .into_par_iter()
660 .map(|i| {
661 let im = &im_peaks[i];
662 let peaks = expand_im_peak_along_rt(im, frames_sorted, ctx, tof_scale, p);
663 RtPeaksForIm { im_index: i, im_id: im.id, peaks }
664 })
665 .collect()
666}
667
668fn nms_by_time(mut peaks: Vec<RtLocalPeak>, min_sep_sec: f32) -> Vec<RtLocalPeak> {
671 if peaks.is_empty() { return peaks; }
672
673 peaks.sort_by(|a, b| {
675 b.apex_smoothed
676 .partial_cmp(&a.apex_smoothed)
677 .unwrap_or(std::cmp::Ordering::Equal)
678 });
679
680 let mut selected: Vec<RtLocalPeak> = Vec::new();
681 'outer: for p in peaks {
682 let t_p = p.rt_sec.unwrap_or(0.0);
683 for q in &selected {
684 let t_q = q.rt_sec.unwrap_or(0.0);
685 if (t_p - t_q).abs() < min_sep_sec {
686 continue 'outer;
688 }
689 }
690 selected.push(p);
691 }
692
693 selected.sort_by(|a, b| {
695 a.rt_sec
696 .unwrap_or(0.0)
697 .partial_cmp(&b.rt_sec.unwrap_or(0.0))
698 .unwrap_or(std::cmp::Ordering::Equal)
699 });
700
701 selected
702}
703
/// Detect peaks in a smoothed RT trace.
///
/// Pipeline: robust (MAD) noise estimate -> effective prominence threshold
/// -> local-maximum candidates -> per-candidate prominence and
/// half-prominence width (with linear interpolation) -> minimum-width
/// filter -> time-based NMS -> area-fraction filter.
///
/// `y_smoothed`, `y_raw` and `rt_times` must share one length; traces with
/// fewer than 3 samples yield no peaks.
pub fn find_rt_peaks(
    y_smoothed: &[f32],
    y_raw: &[f32],
    rt_times: &[f32],
    min_prom: ThresholdMode,
    min_sep_sec: f32,
    min_width_sec: f32,
) -> Vec<RtLocalPeak> {
    let n = y_smoothed.len();
    if n < 3 || y_raw.len() != n || rt_times.len() != n { return Vec::new(); }

    // Effective prominence threshold from the configured mode and a robust
    // noise estimate of the smoothed trace.
    let noise = robust_noise_mad(y_smoothed);
    let min_prom_eff = min_prom.effective(noise);

    // Quick reject: if even the raw maximum is below the threshold, no
    // candidate can pass the prominence test.
    let row_max = y_raw.iter().copied().fold(0.0f32, f32::max);
    if row_max < min_prom_eff { return Vec::new(); }

    // Local maxima. `>` on the left but `>=` on the right means a flat
    // plateau contributes only its first sample as a candidate.
    let mut cands = Vec::with_capacity(n / 4);
    for i in 1..n-1 {
        let yi = y_smoothed[i];
        if yi > y_smoothed[i-1] && yi >= y_smoothed[i+1] { cands.push(i); }
    }

    let mut peaks: Vec<RtLocalPeak> = Vec::with_capacity(n / 4);
    for &i in &cands {
        let apex = y_smoothed[i];

        // Walk outward from the apex until a higher sample (or the edge) is
        // reached, tracking the minimum on each side. The higher of the two
        // side minima is the prominence baseline.
        let mut l = i; let mut left_min = apex;
        while l > 0 { l -= 1; left_min = left_min.min(y_smoothed[l]); if y_smoothed[l] > apex { break; } }
        let mut r = i; let mut right_min = apex;
        while r + 1 < n { r += 1; right_min = right_min.min(y_smoothed[r]); if y_smoothed[r] > apex { break; } }

        let baseline = left_min.max(right_min);
        let prom = apex - baseline;
        if prom < min_prom_eff { continue; }

        // Level at which the width is measured (half prominence).
        let half = baseline + 0.5 * prom;

        // Left half-prominence crossing with linear interpolation between
        // samples wl and wl+1.
        let mut wl = i;
        while wl > 0 && y_smoothed[wl] > half { wl -= 1; }
        let left_x = if wl < i && wl + 1 < n {
            let y0 = y_smoothed[wl]; let y1 = y_smoothed[wl + 1];
            wl as f32 + if y1 != y0 { (half - y0) / (y1 - y0) } else { 0.0 }
        } else { wl as f32 };

        // Right crossing, mirrored. (`wr < n` is always true after the loop
        // since its condition is `wr + 1 < n`; it only guards the `wr - 1`
        // interpolation below together with `wr > i`.)
        let mut wr = i;
        while wr + 1 < n && y_smoothed[wr] > half { wr += 1; }
        let right_x = if wr > i && wr < n {
            let y0 = y_smoothed[wr - 1]; let y1 = y_smoothed[wr];
            (wr - 1) as f32 + if y1 != y0 { (half - y0) / (y1 - y0) } else { 0.0 }
        } else { wr as f32 };

        // Width in seconds, measured on the (possibly irregular) time axis.
        let left_t = t_at_index_frac(rt_times, left_x.max(0.0));
        let right_t = t_at_index_frac(rt_times, right_x.min((n - 1) as f32));
        let width_sec = (right_t - left_t).max(0.0);
        if width_sec < min_width_sec { continue; }

        // Sub-sample apex refinement via quadratic fit, clamped to ±0.5.
        let sub = if i > 0 && i + 1 < n {
            quad_subsample(y_smoothed[i - 1], y_smoothed[i], y_smoothed[i + 1]).clamp(-0.5, 0.5)
        } else { 0.0 };

        // Area of the RAW trace between the fractional bounds.
        let area = trapezoid_area_fractional(y_raw, left_x.max(0.0), right_x.min((n - 1) as f32));

        let apex_t = t_at_index_frac(rt_times, i as f32 + sub);

        peaks.push(RtLocalPeak {
            rt_idx: i,
            rt_sec: Some(apex_t),
            apex_smoothed: apex,
            apex_raw: y_raw[i],
            prominence: prom,
            left_x,
            right_x,
            width_frames: ((right_x - left_x).max(0.0)).round() as usize, area_raw: area,
            subframe: sub,
            left_sec: Some(left_t),
            right_sec: Some(right_t),
            width_sec: Some(width_sec),
        });
    }

    if peaks.is_empty() {
        return peaks;
    }

    // Total trapezoidal area of the raw trace (index units, matching the
    // per-peak areas), used for the relative-area filter below.
    let mut total_area = 0.0f32;
    for j in 0..(n - 1) {
        total_area += 0.5 * (y_raw[j] + y_raw[j + 1]);
    }

    let mut peaks = nms_by_time(peaks, min_sep_sec);
    if peaks.is_empty() {
        return Vec::new();
    }

    let max_area = peaks
        .iter()
        .map(|p| p.area_raw)
        .fold(0.0f32, f32::max)
        .max(1e-6);

    // Drop peaks that are tiny relative to the whole trace (2%) or to the
    // largest surviving peak (5%).
    let min_area_frac_trace = 0.02; let min_area_frac_best = 0.05; peaks.retain(|p| {
        let frac_trace = p.area_raw / total_area.max(1e-6);
        let frac_best = p.area_raw / max_area;
        frac_trace >= min_area_frac_trace && frac_best >= min_area_frac_best
    });

    peaks
}
829
/// Median absolute spacing of `rt_times`, clamped to at least 1e-3.
///
/// Returns 1.0 when fewer than two samples are available. Uses
/// `select_nth_unstable_by` so the median is found in O(n) average time
/// instead of fully sorting the differences.
#[inline]
fn effective_dt(rt_times: &[f32]) -> f32 {
    if rt_times.len() < 2 {
        return 1.0;
    }
    let mut diffs: Vec<f32> = rt_times.windows(2).map(|w| (w[1] - w[0]).abs()).collect();
    let mid = diffs.len() / 2;
    // NaN-tolerant comparator matches the original sort's tie handling.
    let (_, median, _) = diffs.select_nth_unstable_by(mid, |a, b| {
        a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal)
    });
    median.max(1e-3)
}
838
/// Linearly interpolate the time axis `t` at fractional index `x`.
///
/// Out-of-range `x` clamps to the first/last sample; an empty axis yields 0.
#[inline]
fn t_at_index_frac(t: &[f32], x: f32) -> f32 {
    if t.is_empty() {
        return 0.0;
    }
    let last = t.len() - 1;
    if x <= 0.0 {
        return t[0];
    }
    if x >= last as f32 {
        return t[last];
    }
    let lo = x.floor() as usize;
    let hi = lo + 1; // safe: x < last, so lo <= last - 1
    let w = x - lo as f32;
    t[lo] * (1.0 - w) + t[hi] * w
}
850
/// Dense TOF x RT intensity grid, row-major: `data[bin * cols + col]`.
/// Built by `build_tof_rt_grid_full`; each cell holds the frame's summed
/// intensity for that TOF bin.
#[derive(Clone, Debug)]
pub struct TofRtGrid {
    /// RT extent as frame indices (inclusive; (0, 0) when empty).
    pub rt_range_frames: (usize, usize),
    /// RT extent in seconds ((0, 0) when empty).
    pub rt_range_sec: (f32, f32),
    /// First/last frame ids covered ((0, 0) when empty).
    pub frame_id_bounds: (u32, u32),

    /// DIA window group, when applicable.
    pub window_group: Option<u32>,

    /// Shared TOF binning; `rows == scale.num_bins()`.
    pub scale: Arc<TofScale>,

    /// Retention time per column (seconds).
    pub rt_times: Vec<f32>,
    /// Frame id per column.
    pub frame_ids: Vec<u32>,

    /// Number of TOF bins (rows).
    pub rows: usize,
    /// Number of frames (columns).
    pub cols: usize,

    /// Row-major intensity data, `rows * max(cols, 1)` entries.
    pub data: Vec<f32>,
}
879
880pub fn build_tof_rt_grid_full(rt_frames: &RtFrames, window_group: Option<u32>) -> TofRtGrid {
886 assert!(rt_frames.is_consistent(), "RtFrames inconsistent");
887
888 let cols = rt_frames.frames.len();
889 let rows = rt_frames.scale.num_bins();
890
891 let mut data = vec![0.0f32; rows.saturating_mul(cols.max(1))];
892
893 for (col, fbv) in rt_frames.frames.iter().enumerate() {
894 let ub = &fbv.unique_bins;
895 if ub.is_empty() {
896 continue;
897 }
898
899 for b_idx in 0..ub.len() {
900 let bin = ub[b_idx];
901 if bin >= rows {
902 continue; }
904
905 let lo = fbv.offsets[b_idx];
906 let hi = fbv.offsets[b_idx + 1];
907
908 let mut sum = 0.0f32;
909 for k in lo..hi {
910 sum += fbv.intensity[k];
911 }
912
913 let idx = bin * cols + col; data[idx] += sum;
915 }
916 }
917
918 let rt_range_frames = if cols > 0 { (0, cols - 1) } else { (0, 0) };
919 let rt_range_sec = if cols > 0 {
920 (rt_frames.rt_times[0], *rt_frames.rt_times.last().unwrap())
921 } else {
922 (0.0, 0.0)
923 };
924
925 let frame_id_bounds = if rt_frames.frame_ids.is_empty() {
926 (0, 0)
927 } else {
928 (
929 *rt_frames.frame_ids.first().unwrap(),
930 *rt_frames.frame_ids.last().unwrap(),
931 )
932 };
933
934 TofRtGrid {
935 rt_range_frames,
936 rt_range_sec,
937 frame_id_bounds,
938 window_group,
939 scale: rt_frames.scale.clone(),
940 rt_times: rt_frames.rt_times.clone(),
941 frame_ids: rt_frames.frame_ids.clone(),
942 rows,
943 cols,
944 data,
945 }
946}
947
/// Tuning parameters for IM (scan-axis) peak detection on a TOF/scan grid.
#[derive(Clone, Copy, Debug)]
pub struct ImDetectParams {
    /// Minimum peak prominence (forwarded to `find_im_peaks_row`).
    pub min_prom: f32,
    /// Minimum distance between accepted peaks, in scans.
    pub min_distance_scans: usize,
    /// Minimum peak width, in scans.
    pub min_width_scans: usize,
    /// Gaussian smoothing sigma in scans; values <= 0 disable smoothing.
    pub smooth_sigma_scans: f32,
    /// Kernel truncation factor for the Gaussian smoother.
    pub smooth_trunc_k: f32,
}
960
961pub fn detect_im_peaks_from_tof_scan_grid(
962 grid: &TofScanGrid,
963 rt_bounds_frames: (usize, usize),
964 frame_id_bounds: (u32, u32),
965 window_group: Option<u32>,
966 mobility_of: MobilityFn,
967 p: ImDetectParams,
968) -> Vec<ImPeak1D> {
969 use rayon::prelude::*;
970
971 let rows = grid.rows;
972 let cols = grid.cols;
973 if rows == 0 || cols == 0 {
974 return Vec::new();
975 }
976
977 let scans_axis = &grid.scans;
978 debug_assert_eq!(scans_axis.len(), cols, "scans axis must match grid.cols");
979
980 let data_raw = grid
981 .data_raw
982 .as_ref()
983 .unwrap_or(&grid.data); debug_assert_eq!(
986 data_raw.len(),
987 rows * cols,
988 "TofScanGrid data size mismatch"
989 );
990
991 let mobility_of_copy = mobility_of;
993
994 let per_row: Vec<Vec<ImPeak1D>> = (0..rows)
996 .into_par_iter()
997 .map(|tof_row| {
998 let offset = tof_row * cols;
999 let row_raw = &data_raw[offset..offset + cols];
1000
1001 if row_raw.iter().all(|v| *v <= 0.0) {
1003 return Vec::new();
1004 }
1005
1006 let mut row_smooth = row_raw.to_vec();
1008 if p.smooth_sigma_scans > 0.0 && cols > 2 {
1009 smooth_vector_gaussian(&mut row_smooth, p.smooth_sigma_scans, p.smooth_trunc_k);
1010 }
1011
1012 let (tof_lo_f, tof_hi_f) = grid.tof_bounds_for_row(tof_row);
1014 let tof_center_f = grid.tof_center_for_row(tof_row);
1015 let tof_center = tof_center_f.round() as i32;
1016 let tof_bounds = (tof_lo_f.round() as i32, tof_hi_f.round() as i32);
1017
1018 find_im_peaks_row(
1020 &row_smooth,
1021 row_raw,
1022 tof_row,
1023 tof_center,
1024 tof_bounds,
1025 rt_bounds_frames,
1026 frame_id_bounds,
1027 window_group,
1028 mobility_of_copy,
1029 p.min_prom,
1030 p.min_distance_scans,
1031 p.min_width_scans,
1032 scans_axis,
1033 )
1034 })
1035 .collect();
1036
1037 let total_peaks: usize = per_row.iter().map(|v| v.len()).sum();
1039 let mut out = Vec::with_capacity(total_peaks);
1040 for mut v in per_row {
1041 out.append(&mut v);
1042 }
1043 out
1044}
1045
1046pub fn detect_im_peaks_from_tof_scan_window(
1047 win: &TofScanWindowGrid,
1048 mobility_of: MobilityFn,
1049 p: ImDetectParams,
1050) -> Vec<ImPeak1D> {
1051 let rows = win.rows;
1052 let cols = win.cols;
1053 if rows == 0 || cols == 0 {
1054 return Vec::new();
1055 }
1056
1057 let data = match &win.data {
1059 Some(d) => d,
1060 None => {
1061 panic!("TofScanWindowGrid.data is None; materialize grid before IM detection");
1064 }
1065 };
1066
1067 let data_raw = win.data_raw.as_ref().unwrap_or(data);
1068 if data_raw.len() != rows * cols {
1069 panic!("TofScanWindowGrid data size mismatch");
1070 }
1071 debug_assert_eq!(win.scans.len(), cols, "scan axis length mismatch");
1072
1073 let grid = TofScanGrid {
1075 scans: win.scans.clone(),
1076 data: data.clone(),
1077 rows,
1078 cols,
1079 data_raw: win.data_raw.clone(),
1080 scale: (*win.scale).clone(),
1081 };
1082
1083 detect_im_peaks_from_tof_scan_grid(
1084 &grid,
1085 win.rt_range_frames,
1086 win.frame_id_bounds,
1087 win.window_group,
1088 mobility_of,
1089 p,
1090 )
1091}