1mod autoperiod;
13mod change;
14mod hilbert;
15mod lomb_scargle;
16mod matrix_profile;
17mod peak;
18mod period;
19mod sazed;
20mod ssa;
21mod strength;
22
23#[cfg(test)]
24mod tests;
25
26use crate::iter_maybe_parallel;
27use crate::matrix::FdMatrix;
28use num_complex::Complex;
29#[cfg(feature = "parallel")]
30use rayon::iter::ParallelIterator;
31use rustfft::FftPlanner;
32use std::f64::consts::PI;
33
34pub use autoperiod::{
36 autoperiod, autoperiod_fdata, cfd_autoperiod, cfd_autoperiod_fdata, AutoperiodCandidate,
37 AutoperiodResult, CfdAutoperiodResult,
38};
39pub use change::{
40 analyze_peak_timing, classify_seasonality, detect_amplitude_modulation,
41 detect_amplitude_modulation_wavelet, detect_seasonality_changes,
42 detect_seasonality_changes_auto, AmplitudeModulationResult, ModulationType, SeasonalType,
43 SeasonalityClassification, ThresholdMethod, WaveletAmplitudeResult,
44};
45pub use hilbert::{hilbert_transform, instantaneous_period};
46pub use lomb_scargle::{lomb_scargle, lomb_scargle_fdata, LombScargleResult};
47pub use matrix_profile::{
48 matrix_profile, matrix_profile_fdata, matrix_profile_seasonality, MatrixProfileResult,
49};
50pub use peak::detect_peaks;
51pub use period::{
52 detect_multiple_periods, estimate_period_acf, estimate_period_fft, estimate_period_regression,
53};
54pub use sazed::{sazed, sazed_fdata, SazedComponents, SazedResult};
55pub use ssa::{ssa, ssa_fdata, ssa_seasonality, SsaResult};
56pub use strength::{
57 seasonal_strength_spectral, seasonal_strength_variance, seasonal_strength_wavelet,
58 seasonal_strength_windowed,
59};
60
61pub use self::types::*;
63
/// Shared plain-data types for the seasonality module; re-exported at the
/// module root via `pub use self::types::*`.
mod types {
    /// A single period estimate produced by a spectral detector.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct PeriodEstimate {
        /// Estimated period, in the units of the argument values.
        pub period: f64,
        /// Frequency associated with the estimated period.
        pub frequency: f64,
        /// Spectral power supporting the estimate.
        pub power: f64,
        /// Confidence score for the estimate.
        pub confidence: f64,
    }

    /// A detected local maximum of a curve.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct Peak {
        /// Time (argument value) at which the peak occurs.
        pub time: f64,
        /// Curve value at the peak.
        pub value: f64,
        /// Topographic prominence of the peak.
        pub prominence: f64,
    }

    /// Result of peak-based period detection over a set of curves.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct PeakDetectionResult {
        /// Detected peaks, one inner list per curve.
        pub peaks: Vec<Vec<Peak>>,
        /// Distances between consecutive peaks, one inner list per curve.
        pub inter_peak_distances: Vec<Vec<f64>>,
        /// Mean inter-peak distance, interpreted as the period.
        pub mean_period: f64,
    }

    /// One period found by iterative multi-period detection.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct DetectedPeriod {
        /// Estimated period length.
        pub period: f64,
        /// Confidence score for this period.
        pub confidence: f64,
        /// Seasonal strength associated with this period.
        pub strength: f64,
        /// Amplitude of the fitted sinusoidal component.
        pub amplitude: f64,
        /// Phase of the fitted sinusoidal component.
        pub phase: f64,
        /// Detection iteration in which this period was found.
        pub iteration: usize,
    }

    /// Which seasonal-strength measure to use.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[non_exhaustive]
    pub enum StrengthMethod {
        /// Variance-decomposition-based strength.
        Variance,
        /// Spectral-power-based strength.
        Spectral,
    }

    /// A point in time where seasonal behavior changes.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct ChangePoint {
        /// Time of the change.
        pub time: f64,
        /// Whether seasonality starts or stops here.
        pub change_type: ChangeType,
        /// Seasonal strength just before the change.
        pub strength_before: f64,
        /// Seasonal strength just after the change.
        pub strength_after: f64,
    }

    /// Direction of a seasonality change.
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    #[non_exhaustive]
    pub enum ChangeType {
        /// Seasonality appears.
        Onset,
        /// Seasonality disappears.
        Cessation,
    }

    /// Output of seasonality change-point detection.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct ChangeDetectionResult {
        /// Detected onset/cessation events, in time order.
        pub change_points: Vec<ChangePoint>,
        /// Seasonal strength over time (may contain NaN where undefined).
        pub strength_curve: Vec<f64>,
    }

    /// Instantaneous (per-sample) period information, e.g. from the Hilbert
    /// transform.
    #[derive(Debug, Clone)]
    #[non_exhaustive]
    pub struct InstantaneousPeriod {
        /// Instantaneous period at each sample.
        pub period: Vec<f64>,
        /// Instantaneous frequency at each sample.
        pub frequency: Vec<f64>,
        /// Instantaneous amplitude (envelope) at each sample.
        pub amplitude: Vec<f64>,
    }

    /// Statistics about when peaks occur within seasonal cycles.
    #[derive(Debug, Clone, PartialEq)]
    #[non_exhaustive]
    pub struct PeakTimingResult {
        /// Absolute times of the analyzed peaks.
        pub peak_times: Vec<f64>,
        /// Curve values at those peaks.
        pub peak_values: Vec<f64>,
        /// Peak timing expressed relative to its cycle.
        pub normalized_timing: Vec<f64>,
        /// Mean of the normalized timings.
        pub mean_timing: f64,
        /// Standard deviation of the normalized timings.
        pub std_timing: f64,
        /// Spread (range) of the normalized timings.
        pub range_timing: f64,
        /// Aggregate timing-variability score.
        pub variability_score: f64,
        /// Linear trend in peak timing across cycles.
        pub timing_trend: f64,
        /// Index of the cycle each peak belongs to.
        pub cycle_indices: Vec<usize>,
    }
}
201
/// Column-wise mean of `data`: for each of the `m` evaluation points, the
/// average of that column over all `n` curves.
///
/// When `parallel` is true and the matrix is wide enough, columns are
/// processed through the crate's `iter_maybe_parallel!` macro (presumably
/// rayon-backed when the `parallel` feature is enabled — see the cfg-gated
/// rayon import at the top of this file); otherwise a plain sequential
/// iterator is used.
#[inline]
pub(super) fn compute_mean_curve_impl(data: &FdMatrix, parallel: bool) -> Vec<f64> {
    let (n, m) = data.shape();
    // Parallelism only pays off for wide matrices; 100 columns is the cutoff.
    if parallel && m >= 100 {
        iter_maybe_parallel!(0..m)
            .map(|j| {
                // Sum down column j, then divide once.
                let mut sum = 0.0;
                for i in 0..n {
                    sum += data[(i, j)];
                }
                sum / n as f64
            })
            .collect()
    } else {
        (0..m)
            .map(|j| {
                // Same column reduction, sequential path.
                let mut sum = 0.0;
                for i in 0..n {
                    sum += data[(i, j)];
                }
                sum / n as f64
            })
            .collect()
    }
}
241
/// Column-wise mean curve of `data`, with parallelism enabled (subject to the
/// size cutoff inside [`compute_mean_curve_impl`]).
#[inline]
pub(super) fn compute_mean_curve(data: &FdMatrix) -> Vec<f64> {
    compute_mean_curve_impl(data, true)
}
247
248#[inline]
258pub(super) fn interior_bounds(m: usize) -> Option<(usize, usize)> {
259 let edge_skip = (m as f64 * 0.1) as usize;
260 let interior_start = edge_skip.min(m / 4);
261 let interior_end = m.saturating_sub(edge_skip).max(m * 3 / 4);
262
263 if interior_end <= interior_start {
264 None
265 } else {
266 Some((interior_start, interior_end))
267 }
268}
269
270pub(super) fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
272 interior_bounds(m).filter(|&(s, e)| e > s + min_span)
273}
274
275pub(super) fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
278 let n = data.len();
279 if n < 2 || argvals.len() != n {
280 return (Vec::new(), Vec::new());
281 }
282
283 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
284 let fs = 1.0 / dt; let mut planner = FftPlanner::<f64>::new();
287 let fft = planner.plan_fft_forward(n);
288
289 let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
290 fft.process(&mut buffer);
291
292 let n_freq = n / 2 + 1;
294 let mut power = Vec::with_capacity(n_freq);
295 let mut frequencies = Vec::with_capacity(n_freq);
296
297 for k in 0..n_freq {
298 let freq = k as f64 * fs / n as f64;
299 frequencies.push(freq);
300
301 let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
302 let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
304 power.push(p);
305 }
306
307 (frequencies, power)
308}
309
310pub(super) fn autocorrelation_naive(data: &[f64], max_lag: usize, mean: f64, var: f64) -> Vec<f64> {
312 let n = data.len();
313 let max_lag = max_lag.min(n - 1);
314 let mut acf = Vec::with_capacity(max_lag + 1);
315 for lag in 0..=max_lag {
316 let mut sum = 0.0;
317 for i in 0..(n - lag) {
318 sum += (data[i] - mean) * (data[i + lag] - mean);
319 }
320 acf.push(sum / (n as f64 * var));
321 }
322 acf
323}
324
325pub(super) fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
330 let n = data.len();
331 if n == 0 {
332 return Vec::new();
333 }
334
335 let mean: f64 = data.iter().sum::<f64>() / n as f64;
336 let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;
338
339 if var < 1e-15 {
340 return vec![1.0; max_lag.min(n) + 1];
341 }
342
343 let max_lag = max_lag.min(n - 1);
344
345 if n <= 64 {
346 return autocorrelation_naive(data, max_lag, mean, var);
347 }
348
349 let fft_len = (2 * n).next_power_of_two();
351
352 let mut planner = FftPlanner::<f64>::new();
353 let fft_forward = planner.plan_fft_forward(fft_len);
354 let fft_inverse = planner.plan_fft_inverse(fft_len);
355
356 let mut buffer: Vec<Complex<f64>> = Vec::with_capacity(fft_len);
358 for &x in data {
359 buffer.push(Complex::new(x - mean, 0.0));
360 }
361 buffer.resize(fft_len, Complex::new(0.0, 0.0));
362
363 fft_forward.process(&mut buffer);
365
366 for c in &mut buffer {
368 *c = Complex::new(c.norm_sqr(), 0.0);
369 }
370
371 fft_inverse.process(&mut buffer);
373
374 let norm = fft_len as f64 * n as f64 * var;
376 (0..=max_lag).map(|lag| buffer[lag].re / norm).collect()
377}
378
379pub(super) fn try_add_peak(
381 peaks: &mut Vec<usize>,
382 candidate: usize,
383 signal: &[f64],
384 min_distance: usize,
385) {
386 if let Some(&last) = peaks.last() {
387 if candidate - last >= min_distance {
388 peaks.push(candidate);
389 } else if signal[candidate] > signal[last] {
390 *peaks.last_mut().unwrap_or(&mut 0) = candidate;
392 }
393 } else {
394 peaks.push(candidate);
395 }
396}
397
398pub(crate) fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
400 let n = signal.len();
401 if n < 3 {
402 return Vec::new();
403 }
404
405 let mut peaks = Vec::new();
406
407 for i in 1..(n - 1) {
408 if signal[i] > signal[i - 1] && signal[i] > signal[i + 1] {
409 try_add_peak(&mut peaks, i, signal, min_distance);
410 }
411 }
412
413 peaks
414}
415
/// Topographic prominence of the peak at `peak_idx`: the peak height minus
/// the higher of the two minima reached while descending left and right,
/// stopping at the first sample at least as tall as the peak (or the end of
/// the signal).
pub(crate) fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Lowest value seen while walking away from the peak, stopping at the
    // first sample that matches or exceeds it.
    fn descend<'a, I: Iterator<Item = &'a f64>>(iter: I, peak_val: f64) -> f64 {
        let mut lowest = peak_val;
        for &v in iter {
            if v >= peak_val {
                break;
            }
            lowest = lowest.min(v);
        }
        lowest
    }

    let left_min = descend(signal[..peak_idx].iter().rev(), peak_val);
    let right_min = descend(signal[peak_idx + 1..].iter(), peak_val);

    peak_val - left_min.max(right_min)
}
440
441pub(super) fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
443 if phase.is_empty() {
444 return Vec::new();
445 }
446
447 let mut unwrapped = vec![phase[0]];
448 let mut cumulative_correction = 0.0;
449
450 for i in 1..phase.len() {
451 let diff = phase[i] - phase[i - 1];
452
453 if diff > PI {
455 cumulative_correction -= 2.0 * PI;
456 } else if diff < -PI {
457 cumulative_correction += 2.0 * PI;
458 }
459
460 unwrapped.push(phase[i] + cumulative_correction);
461 }
462
463 unwrapped
464}
465
466pub(super) fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
473 let gaussian = (-t * t / 2.0).exp();
474 let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
475 oscillation * gaussian
476}
477
478#[allow(dead_code)]
492pub(super) fn cwt_morlet(
493 signal: &[f64],
494 argvals: &[f64],
495 scale: f64,
496 omega0: f64,
497) -> Vec<Complex<f64>> {
498 let n = signal.len();
499 if n == 0 || scale <= 0.0 {
500 return Vec::new();
501 }
502
503 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
504
505 let norm = 1.0 / scale.sqrt();
508
509 (0..n)
510 .map(|b| {
511 let mut sum = Complex::new(0.0, 0.0);
512 for k in 0..n {
513 let t_normalized = (argvals[k] - argvals[b]) / scale;
514 if t_normalized.abs() < 6.0 {
516 let wavelet = morlet_wavelet(t_normalized, omega0);
517 sum += signal[k] * wavelet.conj();
518 }
519 }
520 sum * norm * dt
521 })
522 .collect()
523}
524
/// FFT-based continuous wavelet transform of `signal` with a Morlet wavelet
/// at a single `scale` — the O(n log n) counterpart of [`cwt_morlet`].
///
/// The wavelet is sampled in FFT wrap-around order, both inputs are
/// transformed, multiplied as `signal_fft * conj(wavelet_fft)`, and
/// inverse-transformed (a circular correlation). NOTE(review): unlike the
/// direct version, this wraps around at the signal edges.
pub(super) fn cwt_morlet_fft(
    signal: &[f64],
    argvals: &[f64],
    scale: f64,
    omega0: f64,
) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    // Assumes a uniform grid; dt is the average spacing.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    // 1/sqrt(scale) normalization, as in the direct version.
    let norm = 1.0 / scale.sqrt();

    // Wavelet sampled in wrap-around order: non-negative times in the lower
    // half of the buffer, negative times mapped to the upper half.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();

    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);

    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);

    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);

    // Correlation theorem: multiply by the conjugate wavelet spectrum.
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();

    fft_inverse.process(&mut result);

    // dt approximates the integral; 1/n undoes rustfft's unnormalized inverse.
    for c in &mut result {
        *c *= dt / n as f64;
    }

    result
}
584
585pub(super) fn otsu_threshold(values: &[f64]) -> f64 {
590 if values.is_empty() {
591 return 0.5;
592 }
593
594 let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
596 if valid.is_empty() {
597 return 0.5;
598 }
599
600 let vmin = valid.iter().copied().fold(f64::INFINITY, f64::min);
601 let vmax = valid.iter().copied().fold(f64::NEG_INFINITY, f64::max);
602
603 if (vmax - vmin).abs() < 1e-10 {
604 return (vmin + vmax) / 2.0;
605 }
606
607 let n_bins = 256;
608 let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
609 let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
610
611 hist_min + (best_bin as f64 + 0.5) * bin_width
612}
613
614pub(super) fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
616 if x.len() != y.len() || x.len() < 2 {
617 return 0.0;
618 }
619
620 let n = x.len() as f64;
621 let mean_x: f64 = x.iter().sum::<f64>() / n;
622 let mean_y: f64 = y.iter().sum::<f64>() / n;
623
624 let mut num = 0.0;
625 let mut den = 0.0;
626
627 for (&xi, &yi) in x.iter().zip(y.iter()) {
628 num += (xi - mean_x) * (yi - mean_y);
629 den += (xi - mean_x).powi(2);
630 }
631
632 if den.abs() < 1e-15 {
633 0.0
634 } else {
635 num / den
636 }
637}
638
/// Summary statistics of an amplitude envelope, as produced by
/// [`analyze_amplitude_envelope`].
pub(super) struct AmplitudeEnvelopeStats {
    // Coefficient of variation (std / mean) of the envelope.
    pub(super) modulation_score: f64,
    // Normalized linear trend of the envelope over time, clamped to [-1, 1].
    pub(super) amplitude_trend: f64,
    // True when the modulation score exceeds the caller's threshold.
    pub(super) has_modulation: bool,
    // Coarse classification (Stable / Emerging / Fading / Oscillating).
    pub(super) modulation_type: change::ModulationType,
    // Mean envelope amplitude (underscore-prefixed: currently unused by callers).
    pub(super) _mean_amp: f64,
    // Minimum envelope amplitude.
    pub(super) min_amp: f64,
    // Maximum envelope amplitude.
    pub(super) max_amp: f64,
}
649
/// Summarizes an amplitude envelope over the interior of the domain:
/// coefficient-of-variation modulation score, normalized linear amplitude
/// trend, and a coarse modulation classification.
///
/// Assumes `interior_envelope` and `interior_times` are non-empty and of
/// equal length (an empty input would yield NaN statistics) — callers are
/// expected to pass pre-trimmed interior slices. TODO confirm callers always
/// guarantee this.
pub(super) fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    let n_interior = interior_envelope.len() as f64;

    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .copied()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .copied()
        .fold(f64::NEG_INFINITY, f64::max);

    // Modulation score = coefficient of variation (std / mean), guarded
    // against a near-zero mean amplitude.
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Least-squares slope of amplitude against time.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Express the total change over the window relative to the mean
    // amplitude, clamped to [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    // Classify: trend sign selects Emerging/Fading; modulation without a
    // clear trend is treated as Oscillating.
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = if !has_modulation {
        change::ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        change::ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        change::ModulationType::Fading
    } else {
        change::ModulationType::Oscillating
    };

    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
717
718pub(super) fn fit_and_subtract_sinusoid(
720 residual: &mut [f64],
721 argvals: &[f64],
722 period: f64,
723) -> (f64, f64, f64, f64) {
724 let m = residual.len();
725 let omega = 2.0 * PI / period;
726 let mut cos_sum = 0.0;
727 let mut sin_sum = 0.0;
728
729 for (j, &t) in argvals.iter().enumerate() {
730 cos_sum += residual[j] * (omega * t).cos();
731 sin_sum += residual[j] * (omega * t).sin();
732 }
733
734 let a = 2.0 * cos_sum / m as f64;
735 let b = 2.0 * sin_sum / m as f64;
736 let amplitude = (a * a + b * b).sqrt();
737 let phase = b.atan2(a);
738
739 for (j, &t) in argvals.iter().enumerate() {
740 residual[j] -= a * (omega * t).cos() + b * (omega * t).sin();
741 }
742
743 (a, b, amplitude, phase)
744}
745
746pub(super) fn validate_sazed_component(
748 period: f64,
749 confidence: f64,
750 min_period: f64,
751 max_period: f64,
752 threshold: f64,
753) -> Option<f64> {
754 if period.is_finite() && period > min_period && period < max_period && confidence > threshold {
755 Some(period)
756 } else {
757 None
758 }
759}
760
761pub(super) fn count_agreeing_periods(
763 periods: &[f64],
764 reference: f64,
765 tolerance: f64,
766) -> (usize, f64) {
767 let mut count = 0;
768 let mut sum = 0.0;
769 for &p in periods {
770 let rel_diff = (reference - p).abs() / reference.max(p);
771 if rel_diff <= tolerance {
772 count += 1;
773 sum += p;
774 }
775 }
776 (count, sum)
777}
778
779pub(super) fn find_acf_descent_end(acf: &[f64]) -> usize {
781 for i in 1..acf.len() {
782 if acf[i] < 0.0 {
783 return i;
784 }
785 if i > 1 && acf[i] > acf[i - 1] {
786 return i - 1;
787 }
788 }
789 1
790}
791
792pub(super) fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
794 if acf.len() < 4 {
795 return None;
796 }
797
798 let min_search_start = find_acf_descent_end(acf);
799 let peaks = find_peaks_1d(&acf[min_search_start..], 1);
800 if peaks.is_empty() {
801 return None;
802 }
803
804 let peak_lag = peaks[0] + min_search_start;
805 Some((peak_lag, acf[peak_lag].max(0.0)))
806}
807
/// Seasonal strength of each complete cycle of length `period` within the
/// sampling window, using the variance-based strength measure.
///
/// Returns `(cycle_strengths, weak_seasons)`: one strength value per cycle
/// (NaN for cycles with fewer than 4 grid points or whose sub-matrix cannot
/// be built), plus the indices of cycles whose strength falls below
/// `strength_thresh`. Assumes `argvals` has one entry per column of `data`.
pub(super) fn compute_cycle_strengths(
    data: &FdMatrix,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    let (n, m) = data.shape();
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    // Number of whole cycles that fit in the observation window.
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // Column range [start_idx, end_idx) covering this cycle in time.
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        if cycle_m < 4 {
            // Too few points in this cycle to measure strength.
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Extract this cycle's columns into a new column-major matrix.
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[(i, j)]))
            .collect();
        let Ok(cycle_mat) = FdMatrix::from_column_major(cycle_data, n, cycle_m) else {
            cycle_strengths.push(f64::NAN);
            continue;
        };
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        let strength_val =
            strength::seasonal_strength_variance(&cycle_mat, &cycle_argvals, period, 3);

        cycle_strengths.push(strength_val);
        if strength_val < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    (cycle_strengths, weak_seasons)
}
856
857pub(super) fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
859 let min_val = valid.iter().copied().fold(f64::INFINITY, f64::min);
860 let max_val = valid.iter().copied().fold(f64::NEG_INFINITY, f64::max);
861 let bin_width = (max_val - min_val) / n_bins as f64;
862 let mut histogram = vec![0usize; n_bins];
863 for &v in valid {
864 let bin = ((v - min_val) / bin_width).min(n_bins as f64 - 1.0) as usize;
865 histogram[bin] += 1;
866 }
867 (histogram, min_val, bin_width)
868}
869
870pub(super) fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
872 let n_bins = histogram.len();
873 let mut sum_total = 0.0;
874 for (i, &count) in histogram.iter().enumerate() {
875 sum_total += i as f64 * count as f64;
876 }
877
878 let mut best_bin = 0;
879 let mut best_variance = 0.0;
880 let mut sum_b = 0.0;
881 let mut weight_b = 0.0;
882
883 for t in 0..n_bins {
884 weight_b += histogram[t] as f64;
885 if weight_b == 0.0 {
886 continue;
887 }
888 let weight_f = total - weight_b;
889 if weight_f == 0.0 {
890 break;
891 }
892 sum_b += t as f64 * histogram[t] as f64;
893 let mean_b = sum_b / weight_b;
894 let mean_f = (sum_total - sum_b) / weight_f;
895 let variance = weight_b * weight_f * (mean_b - mean_f).powi(2);
896 if variance > best_variance {
897 best_variance = variance;
898 best_bin = t;
899 }
900 }
901
902 (best_bin, best_variance)
903}
904
905pub(super) fn sum_harmonic_power(
907 frequencies: &[f64],
908 power: &[f64],
909 fundamental_freq: f64,
910 tolerance: f64,
911) -> (f64, f64) {
912 let mut seasonal_power = 0.0;
913 let mut total_power = 0.0;
914
915 for (i, (&freq, &p)) in frequencies.iter().zip(power.iter()).enumerate() {
916 if i == 0 {
917 continue;
918 }
919 total_power += p;
920 let ratio = freq / fundamental_freq;
921 let nearest_harmonic = ratio.round();
922 if (ratio - nearest_harmonic).abs() < tolerance && nearest_harmonic >= 1.0 {
923 seasonal_power += p;
924 }
925 }
926
927 (seasonal_power, total_power)
928}
929
930pub(super) fn crossing_direction(
934 ss: f64,
935 threshold: f64,
936 in_seasonal: bool,
937 i: usize,
938 last_change_idx: Option<usize>,
939 min_dur_points: usize,
940) -> Option<bool> {
941 if ss.is_nan() {
942 return None;
943 }
944 let now_seasonal = ss > threshold;
945 if now_seasonal == in_seasonal {
946 return None;
947 }
948 if last_change_idx.is_some_and(|last_idx| i - last_idx < min_dur_points) {
949 return None;
950 }
951 Some(now_seasonal)
952}
953
954pub(super) fn build_change_point(
956 i: usize,
957 ss: f64,
958 now_seasonal: bool,
959 strength_curve: &[f64],
960 argvals: &[f64],
961) -> ChangePoint {
962 let change_type = if now_seasonal {
963 ChangeType::Onset
964 } else {
965 ChangeType::Cessation
966 };
967 let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
968 strength_curve[i - 1]
969 } else {
970 ss
971 };
972 ChangePoint {
973 time: argvals[i],
974 change_type,
975 strength_before,
976 strength_after: ss,
977 }
978}
979
980pub(super) fn detect_threshold_crossings(
982 strength_curve: &[f64],
983 argvals: &[f64],
984 threshold: f64,
985 min_dur_points: usize,
986) -> Vec<ChangePoint> {
987 let mut change_points = Vec::new();
988 let mut in_seasonal = strength_curve[0] > threshold;
989 let mut last_change_idx: Option<usize> = None;
990
991 for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
992 let Some(now_seasonal) = crossing_direction(
993 ss,
994 threshold,
995 in_seasonal,
996 i,
997 last_change_idx,
998 min_dur_points,
999 ) else {
1000 continue;
1001 };
1002
1003 change_points.push(build_change_point(
1004 i,
1005 ss,
1006 now_seasonal,
1007 strength_curve,
1008 argvals,
1009 ));
1010
1011 in_seasonal = now_seasonal;
1012 last_change_idx = Some(i);
1013 }
1014
1015 change_points
1016}
1017
1018pub(super) fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
1020 if power.len() < 3 {
1021 return Vec::new();
1022 }
1023
1024 let mut sorted_power: Vec<f64> = power.to_vec();
1026 crate::helpers::sort_nan_safe(&mut sorted_power);
1027 let noise_floor = sorted_power[sorted_power.len() / 2];
1028 let threshold = noise_floor * 2.0; let mut peaks: Vec<(usize, f64)> = Vec::new();
1032 for i in 1..(power.len() - 1) {
1033 if power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold {
1034 peaks.push((i, power[i]));
1035 }
1036 }
1037
1038 peaks.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
1040
1041 peaks.into_iter().map(|(idx, _)| idx).collect()
1042}
1043
/// Consensus period across several estimators' outputs.
///
/// For each candidate, counts how many periods agree with it within relative
/// `tolerance` (every period agrees with itself), and returns the mean of
/// the largest agreeing group together with its size. Ties on group size are
/// broken in favour of the group with the larger mean period. Returns
/// `(NaN, 0)` for empty input.
pub(super) fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
    if periods.is_empty() {
        return (f64::NAN, 0);
    }
    if periods.len() == 1 {
        return (periods[0], 1);
    }

    let mut best_period = periods[0];
    let mut best_count = 0;
    let mut best_sum = 0.0;

    for &p1 in periods {
        let (count, sum) = count_agreeing_periods(periods, p1, tolerance);

        // Prefer larger groups; on equal size, prefer the larger mean period.
        // (`best_count.max(1)` guards the 0/0 comparison on the first pass.)
        if count > best_count
            || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64)
        {
            best_count = count;
            best_period = sum / count as f64;
            best_sum = sum;
        }
    }

    (best_period, best_count)
}
1071
1072pub(super) fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
1074 let lag = (period / dt).round() as usize;
1075
1076 if lag == 0 || lag >= acf.len() {
1077 return 0.0;
1078 }
1079
1080 let acf_at_lag = acf[lag];
1083
1084 let half_lag = lag / 2;
1086 let double_lag = lag * 2;
1087
1088 let mut score = acf_at_lag.max(0.0);
1089
1090 if half_lag > 0 && half_lag < acf.len() {
1093 let half_acf = acf[half_lag];
1094 if half_acf > acf_at_lag * 0.7 {
1096 score *= 0.5;
1097 }
1098 }
1099
1100 if double_lag < acf.len() {
1101 let double_acf = acf[double_lag];
1102 if double_acf > 0.3 {
1104 score *= 1.2;
1105 }
1106 }
1107
1108 score.min(1.0)
1109}
1110
1111pub(super) fn refine_period_gradient(
1113 acf: &[f64],
1114 initial_period: f64,
1115 dt: f64,
1116 steps: usize,
1117) -> f64 {
1118 let mut period = initial_period;
1119 let step_size = dt * 0.5; for _ in 0..steps {
1122 let current_score = validate_period_acf(acf, period, dt);
1123 let left_score = validate_period_acf(acf, period - step_size, dt);
1124 let right_score = validate_period_acf(acf, period + step_size, dt);
1125
1126 if left_score > current_score && left_score > right_score {
1127 period -= step_size;
1128 } else if right_score > current_score {
1129 period += step_size;
1130 }
1131 }
1133
1134 period.max(dt) }
1136
1137pub(super) fn cluster_periods(
1139 candidates: &[(f64, f64)],
1140 tolerance: f64,
1141 min_size: usize,
1142) -> Vec<(f64, f64)> {
1143 if candidates.is_empty() {
1144 return Vec::new();
1145 }
1146
1147 let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
1149 sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));
1150
1151 let mut clusters: Vec<(f64, f64)> = Vec::new(); let mut current_cluster: Vec<(f64, f64)> = vec![sorted[0]];
1153
1154 for &(period, power) in sorted.iter().skip(1) {
1155 let cluster_center =
1156 current_cluster.iter().map(|(p, _)| p).sum::<f64>() / current_cluster.len() as f64;
1157
1158 let rel_diff = (period - cluster_center).abs() / cluster_center.max(period);
1159
1160 if rel_diff <= tolerance {
1161 current_cluster.push((period, power));
1163 } else {
1164 if current_cluster.len() >= min_size {
1166 let center = current_cluster.iter().map(|(p, pw)| p * pw).sum::<f64>()
1167 / current_cluster
1168 .iter()
1169 .map(|(_, pw)| pw)
1170 .sum::<f64>()
1171 .max(1e-15);
1172 let total_power: f64 = current_cluster.iter().map(|(_, pw)| pw).sum();
1173 clusters.push((center, total_power));
1174 }
1175 current_cluster = vec![(period, power)];
1176 }
1177 }
1178
1179 if current_cluster.len() >= min_size {
1181 let center = current_cluster.iter().map(|(p, pw)| p * pw).sum::<f64>()
1182 / current_cluster
1183 .iter()
1184 .map(|(_, pw)| pw)
1185 .sum::<f64>()
1186 .max(1e-15);
1187 let total_power: f64 = current_cluster.iter().map(|(_, pw)| pw).sum();
1188 clusters.push((center, total_power));
1189 }
1190
1191 clusters
1192}