1use crate::basis::fourier_basis_with_period;
13use crate::fdata::deriv_1d;
14use crate::{iter_maybe_parallel, slice_maybe_parallel};
15use num_complex::Complex;
16#[cfg(feature = "parallel")]
17use rayon::iter::ParallelIterator;
18use rustfft::FftPlanner;
19use std::f64::consts::PI;
20
/// Result of a single period-estimation method.
#[derive(Debug, Clone)]
pub struct PeriodEstimate {
    /// Estimated period in `argvals` units (NaN when estimation failed).
    pub period: f64,
    /// Reciprocal of `period` (NaN when estimation failed).
    pub frequency: f64,
    /// Method-specific evidence at the estimate (spectral power, ACF value, ...).
    pub power: f64,
    /// Method-specific confidence score (0.0 when estimation failed).
    pub confidence: f64,
}
33
/// One detected peak in a single curve.
#[derive(Debug, Clone)]
pub struct Peak {
    /// Peak location on the `argvals` grid.
    pub time: f64,
    /// Curve value at the peak.
    pub value: f64,
    /// Topographic prominence, normalized by the overall data range.
    pub prominence: f64,
}
44
/// Output of per-curve peak detection (`detect_peaks`).
#[derive(Debug, Clone)]
pub struct PeakDetectionResult {
    /// Detected peaks, one inner vector per curve.
    pub peaks: Vec<Vec<Peak>>,
    /// Distances between consecutive peaks, one inner vector per curve.
    pub inter_peak_distances: Vec<Vec<f64>>,
    /// Mean of all inter-peak distances pooled over curves (NaN when none).
    pub mean_period: f64,
}
55
/// One periodic component found by iterative detection
/// (`detect_multiple_periods`).
#[derive(Debug, Clone)]
pub struct DetectedPeriod {
    /// Estimated period.
    pub period: f64,
    /// FFT confidence (peak power over mean non-DC power).
    pub confidence: f64,
    /// Variance-based seasonal strength at this period.
    pub strength: f64,
    /// Amplitude of the fitted sinusoid.
    pub amplitude: f64,
    /// Phase (radians) of the fitted sinusoid.
    pub phase: f64,
    /// 1-based iteration at which this component was extracted.
    pub iteration: usize,
}
72
/// Which seasonal-strength estimator to use.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum StrengthMethod {
    /// Ratio of seasonal-component variance to total variance.
    Variance,
    /// Ratio of harmonic spectral power to total non-DC power.
    Spectral,
}
81
/// A detected transition in seasonal behavior.
#[derive(Debug, Clone)]
pub struct ChangePoint {
    /// Time of the transition on the `argvals` grid.
    pub time: f64,
    /// Whether seasonality appeared or disappeared here.
    pub change_type: ChangeType,
    /// Seasonal strength just before the transition (falls back to the
    /// current value when no valid previous sample exists).
    pub strength_before: f64,
    /// Seasonal strength at the transition sample.
    pub strength_after: f64,
}
94
/// Direction of a seasonality change.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ChangeType {
    /// Seasonality begins (strength rises above the threshold).
    Onset,
    /// Seasonality ends (strength falls below the threshold).
    Cessation,
}
103
/// Output of seasonality change-point detection.
#[derive(Debug, Clone)]
pub struct ChangeDetectionResult {
    /// Detected onsets/cessations, in time order.
    pub change_points: Vec<ChangePoint>,
    /// Seasonal strength evaluated along the time grid.
    pub strength_curve: Vec<f64>,
}
112
/// Time-varying period/frequency/amplitude estimates, one value per time
/// point.
///
/// NOTE(review): the producing function is outside this chunk; presumably
/// derived from the analytic signal (`hilbert_transform`) — confirm.
#[derive(Debug, Clone)]
pub struct InstantaneousPeriod {
    /// Instantaneous period at each time point.
    pub period: Vec<f64>,
    /// Instantaneous frequency at each time point.
    pub frequency: Vec<f64>,
    /// Instantaneous amplitude (envelope) at each time point.
    pub amplitude: Vec<f64>,
}
123
/// Peak-timing statistics across seasonal cycles.
///
/// NOTE(review): the producing function is outside this chunk; field
/// semantics below are inferred from names — confirm against the producer.
#[derive(Debug, Clone)]
pub struct PeakTimingResult {
    /// Absolute peak times.
    pub peak_times: Vec<f64>,
    /// Curve values at the peaks.
    pub peak_values: Vec<f64>,
    /// Peak position within its cycle (presumably a fraction of the period).
    pub normalized_timing: Vec<f64>,
    /// Mean of `normalized_timing`.
    pub mean_timing: f64,
    /// Standard deviation of `normalized_timing`.
    pub std_timing: f64,
    /// Range (max - min) of `normalized_timing`.
    pub range_timing: f64,
    /// Aggregate timing-variability score.
    pub variability_score: f64,
    /// Linear trend of timing across cycles.
    pub timing_trend: f64,
    /// Cycle index each peak belongs to.
    pub cycle_indices: Vec<usize>,
}
146
/// Coarse classification of a series' seasonal behavior.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum SeasonalType {
    /// Seasonal with consistent peak timing across cycles.
    StableSeasonal,
    /// Seasonal, but peak timing drifts between cycles.
    VariableTiming,
    /// Seasonal in some cycles only.
    IntermittentSeasonal,
    /// No meaningful seasonality.
    NonSeasonal,
}
159
/// Full seasonality classification of a data set.
///
/// NOTE(review): the producing function is outside this chunk; per-cycle
/// fields match the output of `compute_cycle_strengths`.
#[derive(Debug, Clone)]
pub struct SeasonalityClassification {
    /// Whether the series is considered seasonal at all.
    pub is_seasonal: bool,
    /// Whether peak timing is stable across cycles.
    pub has_stable_timing: bool,
    /// Timing-variability measure across cycles.
    pub timing_variability: f64,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Seasonal strength of each whole cycle (NaN for under-sampled cycles).
    pub cycle_strengths: Vec<f64>,
    /// Indices of cycles whose strength fell below the threshold.
    pub weak_seasons: Vec<usize>,
    /// Final qualitative classification.
    pub classification: SeasonalType,
    /// Peak-timing statistics, when they could be computed.
    pub peak_timing: Option<PeakTimingResult>,
}
180
/// How to choose the seasonal/non-seasonal strength threshold.
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ThresholdMethod {
    /// Use the given value directly.
    Fixed(f64),
    /// Use the given percentile of the observed strength values.
    Percentile(f64),
    /// Derive the threshold with Otsu's method (see `otsu_threshold`).
    Otsu,
}
191
/// Qualitative shape of an amplitude-modulation envelope
/// (see `analyze_amplitude_envelope` for the thresholds used).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ModulationType {
    /// Envelope essentially constant (modulation score below threshold).
    Stable,
    /// Amplitude grows over time (strong positive trend).
    Emerging,
    /// Amplitude decays over time (strong negative trend).
    Fading,
    /// Amplitude fluctuates without a clear trend.
    Oscillating,
    /// No seasonality detected at all.
    NonSeasonal,
}
206
/// Result of amplitude-modulation analysis of the seasonal component.
///
/// NOTE(review): the producing function is outside this chunk; the
/// modulation fields mirror `AmplitudeEnvelopeStats`.
#[derive(Debug, Clone)]
pub struct AmplitudeModulationResult {
    /// Whether the series is considered seasonal.
    pub is_seasonal: bool,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Whether the amplitude envelope varies meaningfully over time.
    pub has_modulation: bool,
    /// Qualitative modulation class.
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the envelope.
    pub modulation_score: f64,
    /// Normalized linear amplitude trend, in [-1, 1].
    pub amplitude_trend: f64,
    /// Seasonal strength evaluated along the time grid.
    pub strength_curve: Vec<f64>,
    /// Time points corresponding to `strength_curve`.
    pub time_points: Vec<f64>,
    /// Minimum of `strength_curve`.
    pub min_strength: f64,
    /// Maximum of `strength_curve`.
    pub max_strength: f64,
}
231
/// Result of wavelet-based amplitude-modulation analysis.
///
/// NOTE(review): the producing function is outside this chunk; the
/// modulation fields mirror `AmplitudeEnvelopeStats`.
#[derive(Debug, Clone)]
pub struct WaveletAmplitudeResult {
    /// Whether the series is considered seasonal.
    pub is_seasonal: bool,
    /// Overall seasonal strength.
    pub seasonal_strength: f64,
    /// Whether the wavelet amplitude varies meaningfully over time.
    pub has_modulation: bool,
    /// Qualitative modulation class.
    pub modulation_type: ModulationType,
    /// Coefficient of variation of the wavelet amplitude.
    pub modulation_score: f64,
    /// Normalized linear amplitude trend, in [-1, 1].
    pub amplitude_trend: f64,
    /// Morlet amplitude at the analysis scale, per time point.
    pub wavelet_amplitude: Vec<f64>,
    /// Time points corresponding to `wavelet_amplitude`.
    pub time_points: Vec<f64>,
    /// Wavelet scale used for the analysis.
    pub scale: f64,
}
254
255#[inline]
270fn compute_mean_curve_impl(data: &[f64], n: usize, m: usize, parallel: bool) -> Vec<f64> {
271 if parallel && m >= 100 {
272 iter_maybe_parallel!(0..m)
274 .map(|j| {
275 let mut sum = 0.0;
276 for i in 0..n {
277 sum += data[i + j * n];
278 }
279 sum / n as f64
280 })
281 .collect()
282 } else {
283 (0..m)
285 .map(|j| {
286 let mut sum = 0.0;
287 for i in 0..n {
288 sum += data[i + j * n];
289 }
290 sum / n as f64
291 })
292 .collect()
293 }
294}
295
/// Cross-sectional mean curve with parallelism enabled (delegates to
/// `compute_mean_curve_impl`, which still falls back to serial for small m).
#[inline]
fn compute_mean_curve(data: &[f64], n: usize, m: usize) -> Vec<f64> {
    compute_mean_curve_impl(data, n, m, true)
}
301
/// Index range `[start, end)` trimming roughly 10% of samples from each
/// edge, clamped so at least the middle half of the grid survives; used to
/// avoid edge artifacts in wavelet/Hilbert analyses. `None` when the
/// resulting span is empty.
#[inline]
fn interior_bounds(m: usize) -> Option<(usize, usize)> {
    let trim = (m as f64 * 0.1) as usize;
    let start = usize::min(trim, m / 4);
    let end = usize::max(m.saturating_sub(trim), m * 3 / 4);
    (end > start).then_some((start, end))
}
323
324fn valid_interior_bounds(m: usize, min_span: usize) -> Option<(usize, usize)> {
326 interior_bounds(m).filter(|&(s, e)| e > s + min_span)
327}
328
329fn periodogram(data: &[f64], argvals: &[f64]) -> (Vec<f64>, Vec<f64>) {
332 let n = data.len();
333 if n < 2 || argvals.len() != n {
334 return (Vec::new(), Vec::new());
335 }
336
337 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
338 let fs = 1.0 / dt; let mut planner = FftPlanner::<f64>::new();
341 let fft = planner.plan_fft_forward(n);
342
343 let mut buffer: Vec<Complex<f64>> = data.iter().map(|&x| Complex::new(x, 0.0)).collect();
344 fft.process(&mut buffer);
345
346 let n_freq = n / 2 + 1;
348 let mut power = Vec::with_capacity(n_freq);
349 let mut frequencies = Vec::with_capacity(n_freq);
350
351 for k in 0..n_freq {
352 let freq = k as f64 * fs / n as f64;
353 frequencies.push(freq);
354
355 let p = buffer[k].norm_sqr() / (n as f64 * n as f64);
356 let p = if k > 0 && k < n / 2 { 2.0 * p } else { p };
358 power.push(p);
359 }
360
361 (frequencies, power)
362}
363
/// Normalized autocorrelation of `data` at lags `0..=max_lag`.
///
/// `max_lag` is clamped to `n - 1`. A (near-)constant series has no
/// well-defined correlation structure and is reported as perfectly
/// self-correlated (1.0 at every lag). Returns an empty vector for empty
/// input.
fn autocorrelation(data: &[f64], max_lag: usize) -> Vec<f64> {
    let n = data.len();
    if n == 0 {
        return Vec::new();
    }

    // Clamp before the degenerate-variance branch so both paths return the
    // same number of lags (previously the constant-input path used
    // `max_lag.min(n)` while the main path used `max_lag.min(n - 1)`).
    let max_lag = max_lag.min(n - 1);

    let mean: f64 = data.iter().sum::<f64>() / n as f64;
    let var: f64 = data.iter().map(|&x| (x - mean).powi(2)).sum::<f64>() / n as f64;

    if var < 1e-15 {
        return vec![1.0; max_lag + 1];
    }

    let mut acf = Vec::with_capacity(max_lag + 1);
    for lag in 0..=max_lag {
        let mut sum = 0.0;
        for i in 0..(n - lag) {
            sum += (data[i] - mean) * (data[i + lag] - mean);
        }
        // Biased estimator: always divided by n (not n - lag).
        acf.push(sum / (n as f64 * var));
    }

    acf
}
391
/// Append `candidate` to `peaks`, enforcing `min_distance` between kept
/// peaks; when the candidate is too close to the previous peak, keep
/// whichever of the two has the larger signal value.
fn try_add_peak(peaks: &mut Vec<usize>, candidate: usize, signal: &[f64], min_distance: usize) {
    match peaks.last_mut() {
        Some(last) if candidate - *last < min_distance => {
            if signal[candidate] > signal[*last] {
                *last = candidate;
            }
        }
        _ => peaks.push(candidate),
    }
}
400
/// Indices of strict local maxima of `signal`, kept at least
/// `min_distance` apart; of two maxima closer than that, the taller wins.
fn find_peaks_1d(signal: &[f64], min_distance: usize) -> Vec<usize> {
    let n = signal.len();
    if n < 3 {
        return Vec::new();
    }

    let mut peaks: Vec<usize> = Vec::new();
    for i in 1..n - 1 {
        // Strict local maximum (plateaus are not counted).
        if signal[i] <= signal[i - 1] || signal[i] <= signal[i + 1] {
            continue;
        }
        match peaks.last_mut() {
            // Within the exclusion window: keep the taller of the two.
            Some(last) if i - *last < min_distance => {
                if signal[i] > signal[*last] {
                    *last = i;
                }
            }
            _ => peaks.push(i),
        }
    }

    peaks
}
418
/// Topographic prominence of the peak at `peak_idx`: its height above the
/// higher of the two minima found while walking outward on each side until
/// a value `>= peak` is met (or the signal ends).
fn compute_prominence(signal: &[f64], peak_idx: usize) -> f64 {
    let peak_val = signal[peak_idx];

    // Lowest value to the left, stopping at the first sample >= peak.
    let left_min = signal[..peak_idx]
        .iter()
        .rev()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |acc, &v| acc.min(v));

    // Same scan to the right.
    let right_min = signal[peak_idx + 1..]
        .iter()
        .take_while(|&&v| v < peak_val)
        .fold(peak_val, |acc, &v| acc.min(v));

    peak_val - left_min.max(right_min)
}
443
444pub fn hilbert_transform(signal: &[f64]) -> Vec<Complex<f64>> {
452 let n = signal.len();
453 if n == 0 {
454 return Vec::new();
455 }
456
457 let mut planner = FftPlanner::<f64>::new();
458 let fft_forward = planner.plan_fft_forward(n);
459 let fft_inverse = planner.plan_fft_inverse(n);
460
461 let mut buffer: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
463 fft_forward.process(&mut buffer);
464
465 let half = n / 2;
468 for k in 1..half {
469 buffer[k] *= 2.0;
470 }
471 for k in (half + 1)..n {
472 buffer[k] = Complex::new(0.0, 0.0);
473 }
474
475 fft_inverse.process(&mut buffer);
477
478 for c in buffer.iter_mut() {
480 *c /= n as f64;
481 }
482
483 buffer
484}
485
/// Unwrap a wrapped phase sequence: whenever consecutive raw samples jump
/// by more than π, accumulate a ±2π offset so the output is continuous.
fn unwrap_phase(phase: &[f64]) -> Vec<f64> {
    let Some((&first, rest)) = phase.split_first() else {
        return Vec::new();
    };

    let mut unwrapped = Vec::with_capacity(phase.len());
    unwrapped.push(first);

    let mut offset = 0.0;
    let mut prev = first;
    for &p in rest {
        // Jump is measured on the raw (wrapped) samples.
        let jump = p - prev;
        if jump > PI {
            offset -= 2.0 * PI;
        } else if jump < -PI {
            offset += 2.0 * PI;
        }
        unwrapped.push(p + offset);
        prev = p;
    }

    unwrapped
}
510
511fn morlet_wavelet(t: f64, omega0: f64) -> Complex<f64> {
518 let gaussian = (-t * t / 2.0).exp();
519 let oscillation = Complex::new((omega0 * t).cos(), (omega0 * t).sin());
520 oscillation * gaussian
521}
522
/// Reference (O(n²)) continuous wavelet transform with a Morlet wavelet:
/// direct time-domain correlation of `signal` with the scaled, conjugated
/// wavelet. Kept (dead code) as a readable cross-check for
/// `cwt_morlet_fft`; note the FFT variant uses circular convolution, so
/// edge behavior differs.
///
/// Returns one complex coefficient per sample, or an empty vector when the
/// signal is empty or `scale` is non-positive.
#[allow(dead_code)]
fn cwt_morlet(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;

    // L2 normalization so magnitudes are comparable across scales.
    let norm = 1.0 / scale.sqrt();

    (0..n)
        .map(|b| {
            let mut sum = Complex::new(0.0, 0.0);
            for k in 0..n {
                let t_normalized = (argvals[k] - argvals[b]) / scale;
                // The Gaussian envelope is negligible beyond ~6 standard
                // deviations, so truncate the support there.
                if t_normalized.abs() < 6.0 {
                    let wavelet = morlet_wavelet(t_normalized, omega0);
                    sum += signal[k] * wavelet.conj();
                }
            }
            // dt approximates the continuous-time integral.
            sum * norm * dt
        })
        .collect()
}
564
/// FFT-accelerated continuous wavelet transform with a Morlet wavelet.
///
/// Computes the same correlation as `cwt_morlet` in O(n log n) by
/// multiplying spectra. The convolution is circular, so coefficients near
/// the edges wrap around; callers restrict to interior samples (see
/// `interior_bounds`).
///
/// Returns one coefficient per sample, or an empty vector when the signal
/// is empty or `scale` is non-positive.
fn cwt_morlet_fft(signal: &[f64], argvals: &[f64], scale: f64, omega0: f64) -> Vec<Complex<f64>> {
    let n = signal.len();
    if n == 0 || scale <= 0.0 {
        return Vec::new();
    }

    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    // L2 normalization so magnitudes are comparable across scales.
    let norm = 1.0 / scale.sqrt();

    // Sample the scaled wavelet in FFT layout: non-negative time offsets in
    // the first half of the buffer, negative offsets wrapped into the
    // second half.
    let wavelet_time: Vec<Complex<f64>> = (0..n)
        .map(|k| {
            let t = if k <= n / 2 {
                k as f64 * dt / scale
            } else {
                (k as f64 - n as f64) * dt / scale
            };
            morlet_wavelet(t, omega0) * norm
        })
        .collect();

    let mut planner = FftPlanner::<f64>::new();
    let fft_forward = planner.plan_fft_forward(n);
    let fft_inverse = planner.plan_fft_inverse(n);

    let mut signal_fft: Vec<Complex<f64>> = signal.iter().map(|&x| Complex::new(x, 0.0)).collect();
    fft_forward.process(&mut signal_fft);

    let mut wavelet_fft = wavelet_time;
    fft_forward.process(&mut wavelet_fft);

    // Correlation theorem: multiply by the conjugated wavelet spectrum.
    let mut result: Vec<Complex<f64>> = signal_fft
        .iter()
        .zip(wavelet_fft.iter())
        .map(|(s, w)| *s * w.conj())
        .collect();

    fft_inverse.process(&mut result);

    // dt approximates the integral; 1/n normalizes rustfft's unnormalized
    // inverse transform.
    for c in result.iter_mut() {
        *c *= dt / n as f64;
    }

    result
}
619
620fn otsu_threshold(values: &[f64]) -> f64 {
625 if values.is_empty() {
626 return 0.5;
627 }
628
629 let valid: Vec<f64> = values.iter().copied().filter(|x| x.is_finite()).collect();
631 if valid.is_empty() {
632 return 0.5;
633 }
634
635 let vmin = valid.iter().cloned().fold(f64::INFINITY, f64::min);
636 let vmax = valid.iter().cloned().fold(f64::NEG_INFINITY, f64::max);
637
638 if (vmax - vmin).abs() < 1e-10 {
639 return (vmin + vmax) / 2.0;
640 }
641
642 let n_bins = 256;
643 let (histogram, hist_min, bin_width) = build_histogram(&valid, n_bins);
644 let (best_bin, _) = find_optimal_threshold_bin(&histogram, valid.len() as f64);
645
646 hist_min + (best_bin as f64 + 0.5) * bin_width
647}
648
/// Ordinary least-squares slope of `y` on `x`; 0.0 for mismatched or
/// too-short inputs, or when `x` is numerically constant.
fn linear_slope(x: &[f64], y: &[f64]) -> f64 {
    if x.len() != y.len() || x.len() < 2 {
        return 0.0;
    }

    let n = x.len() as f64;
    let mean_x = x.iter().sum::<f64>() / n;
    let mean_y = y.iter().sum::<f64>() / n;

    // num = Σ(x-x̄)(y-ȳ), den = Σ(x-x̄)²
    let (num, den) = x.iter().zip(y.iter()).fold((0.0, 0.0), |(num, den), (&xi, &yi)| {
        (num + (xi - mean_x) * (yi - mean_y), den + (xi - mean_x).powi(2))
    });

    if den.abs() < 1e-15 {
        0.0
    } else {
        num / den
    }
}
673
/// Internal summary of an amplitude envelope's interior samples
/// (produced by `analyze_amplitude_envelope`).
struct AmplitudeEnvelopeStats {
    // Coefficient of variation (std/mean) of the envelope.
    modulation_score: f64,
    // Normalized linear trend over the window, clamped to [-1, 1].
    amplitude_trend: f64,
    // Whether `modulation_score` exceeded the caller's threshold.
    has_modulation: bool,
    // Qualitative class derived from score and trend.
    modulation_type: ModulationType,
    // Mean envelope value (underscore-prefixed: currently unused by callers).
    _mean_amp: f64,
    // Minimum envelope value.
    min_amp: f64,
    // Maximum envelope value.
    max_amp: f64,
}
684
/// Summarize an amplitude envelope over interior samples: coefficient of
/// variation (the modulation score), normalized linear trend, and a
/// derived `ModulationType`.
///
/// `interior_envelope` and `interior_times` are expected to have equal,
/// non-zero length (an empty slice would produce NaNs via the 0/0 means).
fn analyze_amplitude_envelope(
    interior_envelope: &[f64],
    interior_times: &[f64],
    modulation_threshold: f64,
) -> AmplitudeEnvelopeStats {
    let n_interior = interior_envelope.len() as f64;

    let mean_amp = interior_envelope.iter().sum::<f64>() / n_interior;
    let min_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::INFINITY, f64::min);
    let max_amp = interior_envelope
        .iter()
        .cloned()
        .fold(f64::NEG_INFINITY, f64::max);

    // Modulation score = coefficient of variation (std/mean); 0 when the
    // mean amplitude is negligible.
    let variance = interior_envelope
        .iter()
        .map(|&a| (a - mean_amp).powi(2))
        .sum::<f64>()
        / n_interior;
    let std_amp = variance.sqrt();
    let modulation_score = if mean_amp > 1e-10 {
        std_amp / mean_amp
    } else {
        0.0
    };

    // Least-squares slope of amplitude against time.
    let t_mean = interior_times.iter().sum::<f64>() / n_interior;
    let mut cov_ta = 0.0;
    let mut var_t = 0.0;
    for (&t, &a) in interior_times.iter().zip(interior_envelope.iter()) {
        cov_ta += (t - t_mean) * (a - mean_amp);
        var_t += (t - t_mean).powi(2);
    }
    let slope = if var_t > 1e-10 { cov_ta / var_t } else { 0.0 };

    // Trend = total amplitude change over the window relative to the mean,
    // clamped to [-1, 1].
    let time_span = interior_times.last().unwrap_or(&1.0) - interior_times.first().unwrap_or(&0.0);
    let amplitude_trend = if mean_amp > 1e-10 && time_span > 1e-10 {
        (slope * time_span / mean_amp).clamp(-1.0, 1.0)
    } else {
        0.0
    };

    // Classify: stable below the score threshold, otherwise by trend sign
    // and magnitude (|trend| > 0.3 means a clear direction).
    let has_modulation = modulation_score > modulation_threshold;
    let modulation_type = if !has_modulation {
        ModulationType::Stable
    } else if amplitude_trend > 0.3 {
        ModulationType::Emerging
    } else if amplitude_trend < -0.3 {
        ModulationType::Fading
    } else {
        ModulationType::Oscillating
    };

    AmplitudeEnvelopeStats {
        modulation_score,
        amplitude_trend,
        has_modulation,
        modulation_type,
        _mean_amp: mean_amp,
        min_amp,
        max_amp,
    }
}
752
/// Fit a single sinusoid of the given `period` to `residual` by discrete
/// projection onto cos/sin (exact on a uniform grid covering whole
/// periods), then subtract it in place.
///
/// Returns `(a, b, amplitude, phase)` for the fitted component
/// `a·cos(ωt) + b·sin(ωt)`.
fn fit_and_subtract_sinusoid(
    residual: &mut [f64],
    argvals: &[f64],
    period: f64,
) -> (f64, f64, f64, f64) {
    let omega = 2.0 * PI / period;

    // Projections onto the two quadrature components.
    let (cos_sum, sin_sum) = argvals.iter().zip(residual.iter()).fold(
        (0.0, 0.0),
        |(c, s), (&t, &r)| (c + r * (omega * t).cos(), s + r * (omega * t).sin()),
    );

    let m = residual.len();
    let a = 2.0 * cos_sum / m as f64;
    let b = 2.0 * sin_sum / m as f64;
    let amplitude = (a * a + b * b).sqrt();
    let phase = b.atan2(a);

    // Remove the fitted component in place.
    for (r, &t) in residual.iter_mut().zip(argvals.iter()) {
        *r -= a * (omega * t).cos() + b * (omega * t).sin();
    }

    (a, b, amplitude, phase)
}
780
/// Accept a SAZED component period only when it is finite, strictly inside
/// `(min_period, max_period)`, and its confidence exceeds `threshold`.
fn validate_sazed_component(
    period: f64,
    confidence: f64,
    min_period: f64,
    max_period: f64,
    threshold: f64,
) -> Option<f64> {
    let in_range = period.is_finite() && period > min_period && period < max_period;
    (in_range && confidence > threshold).then_some(period)
}
795
/// Count the candidate `periods` within symmetric relative `tolerance` of
/// `reference`, returning `(count, sum_of_agreeing_periods)`.
fn count_agreeing_periods(periods: &[f64], reference: f64, tolerance: f64) -> (usize, f64) {
    periods
        .iter()
        .filter(|&&p| (reference - p).abs() / reference.max(p) <= tolerance)
        .fold((0, 0.0), |(count, sum), &p| (count + 1, sum + p))
}
809
/// Lag where the ACF's initial descent from lag 0 ends: the first negative
/// value, or the lag just before the first upturn (only checked from lag 2
/// on); defaults to 1.
fn find_acf_descent_end(acf: &[f64]) -> usize {
    for (i, pair) in acf.windows(2).enumerate() {
        let (prev, cur) = (pair[0], pair[1]);
        let lag = i + 1;
        if cur < 0.0 {
            return lag;
        }
        if lag > 1 && cur > prev {
            return lag - 1;
        }
    }
    1
}
822
823fn find_first_acf_peak(acf: &[f64]) -> Option<(usize, f64)> {
825 if acf.len() < 4 {
826 return None;
827 }
828
829 let min_search_start = find_acf_descent_end(acf);
830 let peaks = find_peaks_1d(&acf[min_search_start..], 1);
831 if peaks.is_empty() {
832 return None;
833 }
834
835 let peak_lag = peaks[0] + min_search_start;
836 Some((peak_lag, acf[peak_lag].max(0.0)))
837}
838
/// Per-cycle seasonal strength over whole periods of the time grid.
///
/// Partitions `[argvals[0], argvals[m-1]]` into consecutive windows of
/// length `period`, computes the variance-based strength within each, and
/// collects the indices of cycles whose strength falls below
/// `strength_thresh`. Windows with fewer than 4 samples get NaN strength
/// and are never counted as weak.
///
/// Returns `(cycle_strengths, weak_season_indices)`.
fn compute_cycle_strengths(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    strength_thresh: f64,
) -> (Vec<f64>, Vec<usize>) {
    let t_start = argvals[0];
    let t_end = argvals[m - 1];
    // Only whole cycles are analyzed; a trailing partial cycle is dropped.
    let n_cycles = ((t_end - t_start) / period).floor() as usize;

    let mut cycle_strengths = Vec::with_capacity(n_cycles);
    let mut weak_seasons = Vec::new();

    for cycle in 0..n_cycles {
        let cycle_start = t_start + cycle as f64 * period;
        let cycle_end = cycle_start + period;

        // Sample-index window covering [cycle_start, cycle_end].
        let start_idx = argvals.iter().position(|&t| t >= cycle_start).unwrap_or(0);
        let end_idx = argvals.iter().position(|&t| t > cycle_end).unwrap_or(m);

        let cycle_m = end_idx - start_idx;
        // Too few samples for a meaningful strength estimate.
        if cycle_m < 4 {
            cycle_strengths.push(f64::NAN);
            continue;
        }

        // Extract the n x cycle_m sub-matrix (column-major, curve fastest).
        let cycle_data: Vec<f64> = (start_idx..end_idx)
            .flat_map(|j| (0..n).map(move |i| data[i + j * n]))
            .collect();
        let cycle_argvals: Vec<f64> = argvals[start_idx..end_idx].to_vec();

        let strength =
            seasonal_strength_variance(&cycle_data, n, cycle_m, &cycle_argvals, period, 3);

        cycle_strengths.push(strength);
        // NaN compares false here, so NaN cycles are not flagged as weak.
        if strength < strength_thresh {
            weak_seasons.push(cycle);
        }
    }

    (cycle_strengths, weak_seasons)
}
884
/// Histogram of `valid` over `n_bins` equal-width bins spanning
/// `[min, max]`; returns `(counts, min, bin_width)`. The maximum value is
/// clamped into the last bin. The caller guarantees a non-degenerate
/// value range (otherwise `bin_width` would be 0).
fn build_histogram(valid: &[f64], n_bins: usize) -> (Vec<usize>, f64, f64) {
    let (min_val, max_val) = valid
        .iter()
        .fold((f64::INFINITY, f64::NEG_INFINITY), |(lo, hi), &v| {
            (lo.min(v), hi.max(v))
        });
    let bin_width = (max_val - min_val) / n_bins as f64;

    let mut histogram = vec![0usize; n_bins];
    for &v in valid {
        let bin = ((v - min_val) / bin_width).min(n_bins as f64 - 1.0) as usize;
        histogram[bin] += 1;
    }

    (histogram, min_val, bin_width)
}
897
/// Otsu scan over a histogram: returns the bin index and (unnormalized)
/// between-class variance of the best background/foreground split. Ties
/// keep the earliest bin.
fn find_optimal_threshold_bin(histogram: &[usize], total: f64) -> (usize, f64) {
    // Weighted sum of all bin indices (for the foreground mean below).
    let sum_total: f64 = histogram
        .iter()
        .enumerate()
        .map(|(i, &count)| i as f64 * count as f64)
        .sum();

    let mut best = (0usize, 0.0f64);
    let mut weight_b = 0.0; // cumulative background count
    let mut sum_b = 0.0; // cumulative background index sum

    for (t, &count) in histogram.iter().enumerate() {
        weight_b += count as f64;
        if weight_b == 0.0 {
            continue; // no background mass yet
        }
        let weight_f = total - weight_b;
        if weight_f == 0.0 {
            break; // no foreground mass left
        }
        sum_b += t as f64 * count as f64;
        let mean_b = sum_b / weight_b;
        let mean_f = (sum_total - sum_b) / weight_f;
        let variance = weight_b * weight_f * (mean_b - mean_f).powi(2);
        if variance > best.1 {
            best = (t, variance);
        }
    }

    best
}
932
/// Split spectral power into `(harmonic_power, total_power)`, skipping the
/// DC bin. A bin counts as harmonic when `freq / fundamental_freq` is
/// within `tolerance` of a positive integer.
fn sum_harmonic_power(
    frequencies: &[f64],
    power: &[f64],
    fundamental_freq: f64,
    tolerance: f64,
) -> (f64, f64) {
    let mut seasonal_power = 0.0;
    let mut total_power = 0.0;

    // skip(1) drops the DC bin from both sums.
    for (&freq, &p) in frequencies.iter().zip(power.iter()).skip(1) {
        total_power += p;
        let ratio = freq / fundamental_freq;
        let nearest = ratio.round();
        if nearest >= 1.0 && (ratio - nearest).abs() < tolerance {
            seasonal_power += p;
        }
    }

    (seasonal_power, total_power)
}
957
/// Decide whether strength sample `ss` flips the seasonal/non-seasonal
/// state at index `i`. Returns `Some(new_state)` only for a genuine
/// crossing that also respects the minimum duration since the last change;
/// NaN samples never trigger a change.
fn crossing_direction(
    ss: f64,
    threshold: f64,
    in_seasonal: bool,
    i: usize,
    last_change_idx: Option<usize>,
    min_dur_points: usize,
) -> Option<bool> {
    if ss.is_nan() {
        return None;
    }

    let now_seasonal = ss > threshold;
    let changed = now_seasonal != in_seasonal;
    let too_soon = last_change_idx.is_some_and(|last| i - last < min_dur_points);

    (changed && !too_soon).then_some(now_seasonal)
}
981
982fn build_change_point(
984 i: usize,
985 ss: f64,
986 now_seasonal: bool,
987 strength_curve: &[f64],
988 argvals: &[f64],
989) -> ChangePoint {
990 let change_type = if now_seasonal {
991 ChangeType::Onset
992 } else {
993 ChangeType::Cessation
994 };
995 let strength_before = if i > 0 && !strength_curve[i - 1].is_nan() {
996 strength_curve[i - 1]
997 } else {
998 ss
999 };
1000 ChangePoint {
1001 time: argvals[i],
1002 change_type,
1003 strength_before,
1004 strength_after: ss,
1005 }
1006}
1007
1008fn detect_threshold_crossings(
1010 strength_curve: &[f64],
1011 argvals: &[f64],
1012 threshold: f64,
1013 min_dur_points: usize,
1014) -> Vec<ChangePoint> {
1015 let mut change_points = Vec::new();
1016 let mut in_seasonal = strength_curve[0] > threshold;
1017 let mut last_change_idx: Option<usize> = None;
1018
1019 for (i, &ss) in strength_curve.iter().enumerate().skip(1) {
1020 let Some(now_seasonal) = crossing_direction(
1021 ss,
1022 threshold,
1023 in_seasonal,
1024 i,
1025 last_change_idx,
1026 min_dur_points,
1027 ) else {
1028 continue;
1029 };
1030
1031 change_points.push(build_change_point(
1032 i,
1033 ss,
1034 now_seasonal,
1035 strength_curve,
1036 argvals,
1037 ));
1038
1039 in_seasonal = now_seasonal;
1040 last_change_idx = Some(i);
1041 }
1042
1043 change_points
1044}
1045
/// Estimate the dominant period of the cross-sectional mean curve from the
/// highest non-DC peak of its periodogram.
///
/// `data` is n x m column-major (curve index fastest); `argvals` is the
/// length-m time grid. `confidence` is the peak power over the mean non-DC
/// power. Returns a NaN period with zero confidence for invalid input or a
/// degenerate spectrum.
pub fn estimate_period_fft(data: &[f64], n: usize, m: usize, argvals: &[f64]) -> PeriodEstimate {
    if n == 0 || m < 4 || argvals.len() != m {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let (frequencies, power) = periodogram(&mean_curve, argvals);

    if frequencies.len() < 2 {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    // Strongest bin, skipping DC (index 0); ties keep the first (lowest
    // frequency) occurrence.
    let mut max_power = 0.0;
    let mut max_idx = 1;
    for (i, &p) in power.iter().enumerate().skip(1) {
        if p > max_power {
            max_power = p;
            max_idx = i;
        }
    }

    let dominant_freq = frequencies[max_idx];
    let period = if dominant_freq > 1e-15 {
        1.0 / dominant_freq
    } else {
        f64::INFINITY
    };

    // Confidence = peak-to-mean power ratio over the non-DC bins.
    let mean_power: f64 = power.iter().skip(1).sum::<f64>() / (power.len() - 1) as f64;
    let confidence = if mean_power > 1e-15 {
        max_power / mean_power
    } else {
        0.0
    };

    PeriodEstimate {
        period,
        frequency: dominant_freq,
        power: max_power,
        confidence,
    }
}
1119
1120pub fn estimate_period_acf(
1131 data: &[f64],
1132 n: usize,
1133 m: usize,
1134 argvals: &[f64],
1135 max_lag: usize,
1136) -> PeriodEstimate {
1137 if n == 0 || m < 4 || argvals.len() != m {
1138 return PeriodEstimate {
1139 period: f64::NAN,
1140 frequency: f64::NAN,
1141 power: 0.0,
1142 confidence: 0.0,
1143 };
1144 }
1145
1146 let mean_curve = compute_mean_curve(data, n, m);
1148
1149 let acf = autocorrelation(&mean_curve, max_lag);
1150
1151 let min_lag = 2;
1153 let peaks = find_peaks_1d(&acf[min_lag..], 1);
1154
1155 if peaks.is_empty() {
1156 return PeriodEstimate {
1157 period: f64::NAN,
1158 frequency: f64::NAN,
1159 power: 0.0,
1160 confidence: 0.0,
1161 };
1162 }
1163
1164 let peak_lag = peaks[0] + min_lag;
1165 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1166 let period = peak_lag as f64 * dt;
1167 let frequency = if period > 1e-15 { 1.0 / period } else { 0.0 };
1168
1169 PeriodEstimate {
1170 period,
1171 frequency,
1172 power: acf[peak_lag],
1173 confidence: acf[peak_lag].abs(),
1174 }
1175}
1176
/// Estimate the period by grid search: fit a Fourier basis (constant plus
/// `n_harmonics` harmonics) at `n_candidates` periods evenly spaced over
/// `[period_min, period_max]`, and pick the period minimizing the residual
/// sum of squares against the mean curve.
///
/// Each basis column is projected independently (valid when the basis is
/// near-orthogonal on the grid). `power` reports `1 - RSS_best/RSS_mean`
/// and `confidence` the capped `RSS_mean/RSS_best` ratio. Returns a NaN
/// period with zero confidence for invalid arguments.
pub fn estimate_period_regression(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period_min: f64,
    period_max: f64,
    n_candidates: usize,
    n_harmonics: usize,
) -> PeriodEstimate {
    if n == 0 || m < 4 || argvals.len() != m || period_min >= period_max || n_candidates < 2 {
        return PeriodEstimate {
            period: f64::NAN,
            frequency: f64::NAN,
            power: 0.0,
            confidence: 0.0,
        };
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // Constant term + (cos, sin) pair per harmonic.
    let nbasis = 1 + 2 * n_harmonics;

    // Evenly spaced candidate periods over [period_min, period_max].
    let candidates: Vec<f64> = (0..n_candidates)
        .map(|i| period_min + (period_max - period_min) * i as f64 / (n_candidates - 1) as f64)
        .collect();

    // Evaluate each candidate (in parallel when enabled): project the mean
    // curve onto each basis column independently and accumulate the RSS.
    let results: Vec<(f64, f64)> = slice_maybe_parallel!(candidates)
        .map(|&period| {
            let basis = fourier_basis_with_period(argvals, nbasis, period);

            let mut rss = 0.0;
            for j in 0..m {
                let mut fitted = 0.0;
                for k in 0..nbasis {
                    let b_val = basis[j + k * m];
                    // Per-column least-squares coefficient <y, b_k>/<b_k, b_k>.
                    let coef: f64 = (0..m)
                        .map(|l| mean_curve[l] * basis[l + k * m])
                        .sum::<f64>()
                        / (0..m)
                            .map(|l| basis[l + k * m].powi(2))
                            .sum::<f64>()
                            .max(1e-15);
                    fitted += coef * b_val;
                }
                let resid = mean_curve[j] - fitted;
                rss += resid * resid;
            }

            (period, rss)
        })
        .collect();

    // Candidate with the smallest RSS wins.
    let (best_period, min_rss) = results
        .iter()
        .min_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
        .cloned()
        .unwrap_or((f64::NAN, f64::INFINITY));

    // Confidence: how much better the best fit is than an average candidate.
    let mean_rss: f64 = results.iter().map(|(_, r)| r).sum::<f64>() / results.len() as f64;
    let confidence = if min_rss > 1e-15 {
        (mean_rss / min_rss).min(10.0)
    } else {
        10.0
    };

    PeriodEstimate {
        period: best_period,
        frequency: if best_period > 1e-15 {
            1.0 / best_period
        } else {
            0.0
        },
        power: 1.0 - min_rss / mean_rss,
        confidence,
    }
}
1274
/// Iteratively detect up to `max_periods` periodic components in the mean
/// curve: estimate the residual's dominant period via FFT, accept it only
/// if its confidence and variance-based strength clear the given minima,
/// subtract the fitted sinusoid, and repeat. Stops at the first rejected
/// candidate.
pub fn detect_multiple_periods(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    max_periods: usize,
    min_confidence: f64,
    min_strength: f64,
) -> Vec<DetectedPeriod> {
    if n == 0 || m < 4 || argvals.len() != m || max_periods == 0 {
        return Vec::new();
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // The residual starts as the mean curve; each accepted component is
    // subtracted in place by `evaluate_next_period`.
    let mut residual = mean_curve.clone();
    let mut detected = Vec::with_capacity(max_periods);

    for iteration in 1..=max_periods {
        match evaluate_next_period(
            &mut residual,
            m,
            argvals,
            min_confidence,
            min_strength,
            iteration,
        ) {
            Some(period) => detected.push(period),
            None => break,
        }
    }

    detected
}
1330
/// One iteration of `detect_multiple_periods`: estimate the residual's
/// dominant period (treating it as a single curve, n = 1), validate the
/// confidence and strength gates, and on success subtract the fitted
/// sinusoid from `residual` in place.
///
/// Returns `None` — leaving `residual` untouched — when the candidate
/// fails either gate.
fn evaluate_next_period(
    residual: &mut [f64],
    m: usize,
    argvals: &[f64],
    min_confidence: f64,
    min_strength: f64,
    iteration: usize,
) -> Option<DetectedPeriod> {
    // Snapshot for the read-only estimators below.
    let residual_data: Vec<f64> = residual.to_vec();
    let est = estimate_period_fft(&residual_data, 1, m, argvals);

    if est.confidence < min_confidence || est.period.is_nan() || est.period.is_infinite() {
        return None;
    }

    let strength = seasonal_strength_variance(&residual_data, 1, m, argvals, est.period, 3);
    if strength < min_strength || strength.is_nan() {
        return None;
    }

    // Accepted: remove the component so later iterations see what remains.
    let (_a, _b, amplitude, phase) = fit_and_subtract_sinusoid(residual, argvals, est.period);

    Some(DetectedPeriod {
        period: est.period,
        confidence: est.confidence,
        strength,
        amplitude,
        phase,
        iteration,
    })
}
1365
/// Optionally pre-smooth curves with a Fourier fit before peak detection.
///
/// When `smooth_first` is false the data is returned unchanged. The basis
/// size defaults to a GCV-selected value between 5 and 25; if the Fourier
/// fit fails, the raw data is returned unchanged as a best-effort
/// fallback.
fn smooth_for_peaks(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    smooth_first: bool,
    smooth_nbasis: Option<usize>,
) -> Vec<f64> {
    if !smooth_first {
        return data.to_vec();
    }
    let nbasis = smooth_nbasis
        .unwrap_or_else(|| crate::basis::select_fourier_nbasis_gcv(data, n, m, argvals, 5, 25));
    if let Some(result) = crate::basis::fourier_fit_1d(data, n, m, argvals, nbasis) {
        result.fitted
    } else {
        data.to_vec()
    }
}
1390
/// Find the peaks of one curve from sign changes of its first derivative.
///
/// A peak is recorded where `d1` crosses from positive to non-positive;
/// candidates closer than `min_dist_points` to the previously kept peak
/// are skipped (first-come wins). Prominence is normalized by
/// `data_range`, and peaks below `min_prominence` (when set) are dropped.
///
/// Returns the kept peaks and the distances between consecutive ones.
fn detect_peaks_single_curve(
    curve: &[f64],
    d1: &[f64],
    argvals: &[f64],
    min_dist_points: usize,
    min_prominence: Option<f64>,
    data_range: f64,
) -> (Vec<Peak>, Vec<f64>) {
    let m = curve.len();
    let mut peak_indices = Vec::new();
    for j in 1..m {
        // Derivative changes sign from + to <= 0 between j-1 and j.
        if d1[j - 1] > 0.0 && d1[j] <= 0.0 {
            // Pick j-1 for a genuine crossing; j when the derivative is
            // (numerically) flat across the pair.
            let idx = if (d1[j - 1] - d1[j]).abs() > 1e-15 {
                j - 1
            } else {
                j
            };

            if peak_indices.is_empty()
                || idx - peak_indices[peak_indices.len() - 1] >= min_dist_points
            {
                peak_indices.push(idx);
            }
        }
    }

    // Build Peak records with range-normalized prominence.
    let mut peaks: Vec<Peak> = peak_indices
        .iter()
        .map(|&idx| {
            let prominence = compute_prominence(curve, idx) / data_range;
            Peak {
                time: argvals[idx],
                value: curve[idx],
                prominence,
            }
        })
        .collect();

    if let Some(min_prom) = min_prominence {
        peaks.retain(|p| p.prominence >= min_prom);
    }

    // Inter-peak distances over the peaks that survived filtering.
    let distances: Vec<f64> = peaks.windows(2).map(|w| w[1].time - w[0].time).collect();

    (peaks, distances)
}
1438
/// Detect peaks on every curve of an n x m column-major data set.
///
/// Optionally smooths the data first (`smooth_for_peaks`), then locates
/// peaks per curve from first-derivative sign changes (in parallel when
/// enabled), enforcing `min_distance` (in `argvals` units) and
/// `min_prominence` (relative to the global data range). `mean_period` is
/// the average inter-peak distance pooled over all curves (NaN when no
/// curve has two or more peaks). Invalid arguments yield an empty result.
pub fn detect_peaks(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    min_distance: Option<f64>,
    min_prominence: Option<f64>,
    smooth_first: bool,
    smooth_nbasis: Option<usize>,
) -> PeakDetectionResult {
    if n == 0 || m < 3 || argvals.len() != m {
        return PeakDetectionResult {
            peaks: Vec::new(),
            inter_peak_distances: Vec::new(),
            mean_period: f64::NAN,
        };
    }

    // Convert the minimum distance from time units to grid points.
    let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
    let min_dist_points = min_distance.map(|d| (d / dt).round() as usize).unwrap_or(1);

    let work_data = smooth_for_peaks(data, n, m, argvals, smooth_first, smooth_nbasis);

    let deriv1 = deriv_1d(&work_data, n, m, argvals, 1);

    // Global data range for prominence normalization across all curves.
    let data_range: f64 = {
        let mut min_val = f64::INFINITY;
        let mut max_val = f64::NEG_INFINITY;
        for &v in work_data.iter() {
            min_val = min_val.min(v);
            max_val = max_val.max(v);
        }
        (max_val - min_val).max(1e-15)
    };

    // Per-curve detection; each curve is gathered from the column-major
    // layout (curve index fastest).
    let results: Vec<(Vec<Peak>, Vec<f64>)> = iter_maybe_parallel!(0..n)
        .map(|i| {
            let curve: Vec<f64> = (0..m).map(|j| work_data[i + j * n]).collect();
            let d1: Vec<f64> = (0..m).map(|j| deriv1[i + j * n]).collect();
            detect_peaks_single_curve(
                &curve,
                &d1,
                argvals,
                min_dist_points,
                min_prominence,
                data_range,
            )
        })
        .collect();

    let peaks: Vec<Vec<Peak>> = results.iter().map(|(p, _)| p.clone()).collect();
    let inter_peak_distances: Vec<Vec<f64>> = results.iter().map(|(_, d)| d.clone()).collect();

    // Pool all inter-peak distances for the overall mean period.
    let all_distances: Vec<f64> = inter_peak_distances.iter().flatten().cloned().collect();
    let mean_period = if all_distances.is_empty() {
        f64::NAN
    } else {
        all_distances.iter().sum::<f64>() / all_distances.len() as f64
    };

    PeakDetectionResult {
        peaks,
        inter_peak_distances,
        mean_period,
    }
}
1524
/// Variance-based seasonal strength: the fraction of the mean curve's
/// variance explained by a Fourier fit of `n_harmonics` harmonics at
/// `period`.
///
/// Each harmonic column is projected independently (near-orthogonal basis
/// assumption); the constant column (k = 0) is excluded so only the
/// oscillatory part counts. Returns a value in [0, 1], 0.0 for an
/// (essentially) constant curve, NaN for invalid arguments.
pub fn seasonal_strength_variance(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
    n_harmonics: usize,
) -> f64 {
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return f64::NAN;
    }

    let mean_curve = compute_mean_curve(data, n, m);

    // Total variance of the mean curve.
    let global_mean: f64 = mean_curve.iter().sum::<f64>() / m as f64;
    let total_var: f64 = mean_curve
        .iter()
        .map(|&x| (x - global_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    if total_var < 1e-15 {
        return 0.0;
    }

    // Constant term + (cos, sin) pair per harmonic.
    let nbasis = 1 + 2 * n_harmonics;
    let basis = fourier_basis_with_period(argvals, nbasis, period);

    // Seasonal component: sum of per-column projections, skipping k = 0
    // (the constant column).
    let mut seasonal = vec![0.0; m];
    for k in 1..nbasis {
        let b_sum: f64 = (0..m).map(|j| basis[j + k * m].powi(2)).sum();
        if b_sum > 1e-15 {
            let coef: f64 = (0..m)
                .map(|j| mean_curve[j] * basis[j + k * m])
                .sum::<f64>()
                / b_sum;
            for j in 0..m {
                seasonal[j] += coef * basis[j + k * m];
            }
        }
    }

    // Explained-variance ratio, clamped to 1 against projection overshoot.
    let seasonal_mean: f64 = seasonal.iter().sum::<f64>() / m as f64;
    let seasonal_var: f64 = seasonal
        .iter()
        .map(|&x| (x - seasonal_mean).powi(2))
        .sum::<f64>()
        / m as f64;

    (seasonal_var / total_var).min(1.0)
}
1601
/// Spectral seasonal strength: the fraction of non-DC periodogram power of
/// the mean curve lying at (within 10% of) integer harmonics of
/// `1 / period`. Returns a value in [0, 1], 0.0 for zero total power, NaN
/// for invalid arguments or a degenerate spectrum.
pub fn seasonal_strength_spectral(
    data: &[f64],
    n: usize,
    m: usize,
    argvals: &[f64],
    period: f64,
) -> f64 {
    if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
        return f64::NAN;
    }

    let mean_curve = compute_mean_curve(data, n, m);

    let (frequencies, power) = periodogram(&mean_curve, argvals);

    if frequencies.len() < 2 {
        return f64::NAN;
    }

    // Power at harmonics of the fundamental vs total (both excluding DC).
    let fundamental_freq = 1.0 / period;
    let (seasonal_power, total_power) =
        sum_harmonic_power(&frequencies, &power, fundamental_freq, 0.1);

    if total_power < 1e-15 {
        return 0.0;
    }

    (seasonal_power / total_power).min(1.0)
}
1642
1643pub fn seasonal_strength_wavelet(
1664 data: &[f64],
1665 n: usize,
1666 m: usize,
1667 argvals: &[f64],
1668 period: f64,
1669) -> f64 {
1670 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1671 return f64::NAN;
1672 }
1673
1674 let mean_curve = compute_mean_curve(data, n, m);
1676
1677 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1679 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1680
1681 let total_variance: f64 = detrended.iter().map(|&x| x * x).sum::<f64>() / m as f64;
1683
1684 if total_variance < 1e-15 {
1685 return 0.0;
1686 }
1687
1688 let omega0 = 6.0;
1690 let scale = period * omega0 / (2.0 * PI);
1691 let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
1692
1693 if wavelet_coeffs.is_empty() {
1694 return f64::NAN;
1695 }
1696
1697 let (interior_start, interior_end) = match interior_bounds(m) {
1699 Some(bounds) => bounds,
1700 None => return f64::NAN,
1701 };
1702
1703 let wavelet_power: f64 = wavelet_coeffs[interior_start..interior_end]
1704 .iter()
1705 .map(|c| c.norm_sqr())
1706 .sum::<f64>()
1707 / (interior_end - interior_start) as f64;
1708
1709 (wavelet_power / total_variance).sqrt().min(1.0)
1712}
1713
1714pub fn seasonal_strength_windowed(
1728 data: &[f64],
1729 n: usize,
1730 m: usize,
1731 argvals: &[f64],
1732 period: f64,
1733 window_size: f64,
1734 method: StrengthMethod,
1735) -> Vec<f64> {
1736 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 || window_size <= 0.0 {
1737 return Vec::new();
1738 }
1739
1740 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1741 let half_window_points = ((window_size / 2.0) / dt).round() as usize;
1742
1743 let mean_curve = compute_mean_curve(data, n, m);
1745
1746 iter_maybe_parallel!(0..m)
1747 .map(|center| {
1748 let start = center.saturating_sub(half_window_points);
1749 let end = (center + half_window_points + 1).min(m);
1750 let window_m = end - start;
1751
1752 if window_m < 4 {
1753 return f64::NAN;
1754 }
1755
1756 let window_data: Vec<f64> = mean_curve[start..end].to_vec();
1757 let window_argvals: Vec<f64> = argvals[start..end].to_vec();
1758
1759 let single_data = window_data.clone();
1761
1762 match method {
1763 StrengthMethod::Variance => seasonal_strength_variance(
1764 &single_data,
1765 1,
1766 window_m,
1767 &window_argvals,
1768 period,
1769 3,
1770 ),
1771 StrengthMethod::Spectral => {
1772 seasonal_strength_spectral(&single_data, 1, window_m, &window_argvals, period)
1773 }
1774 }
1775 })
1776 .collect()
1777}
1778
1779pub fn detect_seasonality_changes(
1798 data: &[f64],
1799 n: usize,
1800 m: usize,
1801 argvals: &[f64],
1802 period: f64,
1803 threshold: f64,
1804 window_size: f64,
1805 min_duration: f64,
1806) -> ChangeDetectionResult {
1807 if n == 0 || m < 4 || argvals.len() != m {
1808 return ChangeDetectionResult {
1809 change_points: Vec::new(),
1810 strength_curve: Vec::new(),
1811 };
1812 }
1813
1814 let strength_curve = seasonal_strength_windowed(
1816 data,
1817 n,
1818 m,
1819 argvals,
1820 period,
1821 window_size,
1822 StrengthMethod::Variance,
1823 );
1824
1825 if strength_curve.is_empty() {
1826 return ChangeDetectionResult {
1827 change_points: Vec::new(),
1828 strength_curve: Vec::new(),
1829 };
1830 }
1831
1832 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1833 let min_dur_points = (min_duration / dt).round() as usize;
1834
1835 let change_points =
1836 detect_threshold_crossings(&strength_curve, argvals, threshold, min_dur_points);
1837
1838 ChangeDetectionResult {
1839 change_points,
1840 strength_curve,
1841 }
1842}
1843
1844pub fn detect_amplitude_modulation(
1883 data: &[f64],
1884 n: usize,
1885 m: usize,
1886 argvals: &[f64],
1887 period: f64,
1888 modulation_threshold: f64,
1889 seasonality_threshold: f64,
1890) -> AmplitudeModulationResult {
1891 let empty_result = AmplitudeModulationResult {
1893 is_seasonal: false,
1894 seasonal_strength: 0.0,
1895 has_modulation: false,
1896 modulation_type: ModulationType::NonSeasonal,
1897 modulation_score: 0.0,
1898 amplitude_trend: 0.0,
1899 strength_curve: Vec::new(),
1900 time_points: Vec::new(),
1901 min_strength: 0.0,
1902 max_strength: 0.0,
1903 };
1904
1905 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
1906 return empty_result;
1907 }
1908
1909 let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);
1911
1912 if overall_strength < seasonality_threshold {
1913 return AmplitudeModulationResult {
1914 is_seasonal: false,
1915 seasonal_strength: overall_strength,
1916 has_modulation: false,
1917 modulation_type: ModulationType::NonSeasonal,
1918 modulation_score: 0.0,
1919 amplitude_trend: 0.0,
1920 strength_curve: Vec::new(),
1921 time_points: argvals.to_vec(),
1922 min_strength: 0.0,
1923 max_strength: 0.0,
1924 };
1925 }
1926
1927 let mean_curve = compute_mean_curve(data, n, m);
1929
1930 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
1932 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
1933 let analytic = hilbert_transform(&detrended);
1934 let envelope: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
1935
1936 if envelope.is_empty() {
1937 return AmplitudeModulationResult {
1938 is_seasonal: true,
1939 seasonal_strength: overall_strength,
1940 has_modulation: false,
1941 modulation_type: ModulationType::Stable,
1942 modulation_score: 0.0,
1943 amplitude_trend: 0.0,
1944 strength_curve: Vec::new(),
1945 time_points: argvals.to_vec(),
1946 min_strength: 0.0,
1947 max_strength: 0.0,
1948 };
1949 }
1950
1951 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
1953 let smooth_window = ((period / dt) as usize).max(3);
1954 let half_window = smooth_window / 2;
1955
1956 let smoothed_envelope: Vec<f64> = (0..m)
1957 .map(|i| {
1958 let start = i.saturating_sub(half_window);
1959 let end = (i + half_window + 1).min(m);
1960 let sum: f64 = envelope[start..end].iter().sum();
1961 sum / (end - start) as f64
1962 })
1963 .collect();
1964
1965 let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
1967 return AmplitudeModulationResult {
1968 is_seasonal: true,
1969 seasonal_strength: overall_strength,
1970 has_modulation: false,
1971 modulation_type: ModulationType::Stable,
1972 modulation_score: 0.0,
1973 amplitude_trend: 0.0,
1974 strength_curve: envelope,
1975 time_points: argvals.to_vec(),
1976 min_strength: 0.0,
1977 max_strength: 0.0,
1978 };
1979 };
1980
1981 let stats = analyze_amplitude_envelope(
1982 &smoothed_envelope[interior_start..interior_end],
1983 &argvals[interior_start..interior_end],
1984 modulation_threshold,
1985 );
1986
1987 AmplitudeModulationResult {
1988 is_seasonal: true,
1989 seasonal_strength: overall_strength,
1990 has_modulation: stats.has_modulation,
1991 modulation_type: stats.modulation_type,
1992 modulation_score: stats.modulation_score,
1993 amplitude_trend: stats.amplitude_trend,
1994 strength_curve: envelope,
1995 time_points: argvals.to_vec(),
1996 min_strength: stats.min_amp,
1997 max_strength: stats.max_amp,
1998 }
1999}
2000
2001pub fn detect_amplitude_modulation_wavelet(
2024 data: &[f64],
2025 n: usize,
2026 m: usize,
2027 argvals: &[f64],
2028 period: f64,
2029 modulation_threshold: f64,
2030 seasonality_threshold: f64,
2031) -> WaveletAmplitudeResult {
2032 let empty_result = WaveletAmplitudeResult {
2033 is_seasonal: false,
2034 seasonal_strength: 0.0,
2035 has_modulation: false,
2036 modulation_type: ModulationType::NonSeasonal,
2037 modulation_score: 0.0,
2038 amplitude_trend: 0.0,
2039 wavelet_amplitude: Vec::new(),
2040 time_points: Vec::new(),
2041 scale: 0.0,
2042 };
2043
2044 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
2045 return empty_result;
2046 }
2047
2048 let overall_strength = seasonal_strength_spectral(data, n, m, argvals, period);
2050
2051 if overall_strength < seasonality_threshold {
2052 return WaveletAmplitudeResult {
2053 is_seasonal: false,
2054 seasonal_strength: overall_strength,
2055 has_modulation: false,
2056 modulation_type: ModulationType::NonSeasonal,
2057 modulation_score: 0.0,
2058 amplitude_trend: 0.0,
2059 wavelet_amplitude: Vec::new(),
2060 time_points: argvals.to_vec(),
2061 scale: 0.0,
2062 };
2063 }
2064
2065 let mean_curve = compute_mean_curve(data, n, m);
2067
2068 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
2070 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
2071
2072 let omega0 = 6.0; let scale = period * omega0 / (2.0 * PI);
2076
2077 let wavelet_coeffs = cwt_morlet_fft(&detrended, argvals, scale, omega0);
2079
2080 if wavelet_coeffs.is_empty() {
2081 return WaveletAmplitudeResult {
2082 is_seasonal: true,
2083 seasonal_strength: overall_strength,
2084 has_modulation: false,
2085 modulation_type: ModulationType::Stable,
2086 modulation_score: 0.0,
2087 amplitude_trend: 0.0,
2088 wavelet_amplitude: Vec::new(),
2089 time_points: argvals.to_vec(),
2090 scale,
2091 };
2092 }
2093
2094 let wavelet_amplitude: Vec<f64> = wavelet_coeffs.iter().map(|c| c.norm()).collect();
2096
2097 let Some((interior_start, interior_end)) = valid_interior_bounds(m, 4) else {
2099 return WaveletAmplitudeResult {
2100 is_seasonal: true,
2101 seasonal_strength: overall_strength,
2102 has_modulation: false,
2103 modulation_type: ModulationType::Stable,
2104 modulation_score: 0.0,
2105 amplitude_trend: 0.0,
2106 wavelet_amplitude,
2107 time_points: argvals.to_vec(),
2108 scale,
2109 };
2110 };
2111
2112 let stats = analyze_amplitude_envelope(
2113 &wavelet_amplitude[interior_start..interior_end],
2114 &argvals[interior_start..interior_end],
2115 modulation_threshold,
2116 );
2117
2118 WaveletAmplitudeResult {
2119 is_seasonal: true,
2120 seasonal_strength: overall_strength,
2121 has_modulation: stats.has_modulation,
2122 modulation_type: stats.modulation_type,
2123 modulation_score: stats.modulation_score,
2124 amplitude_trend: stats.amplitude_trend,
2125 wavelet_amplitude,
2126 time_points: argvals.to_vec(),
2127 scale,
2128 }
2129}
2130
2131pub fn instantaneous_period(
2146 data: &[f64],
2147 n: usize,
2148 m: usize,
2149 argvals: &[f64],
2150) -> InstantaneousPeriod {
2151 if n == 0 || m < 4 || argvals.len() != m {
2152 return InstantaneousPeriod {
2153 period: Vec::new(),
2154 frequency: Vec::new(),
2155 amplitude: Vec::new(),
2156 };
2157 }
2158
2159 let mean_curve = compute_mean_curve(data, n, m);
2161
2162 let dc: f64 = mean_curve.iter().sum::<f64>() / m as f64;
2164 let detrended: Vec<f64> = mean_curve.iter().map(|&x| x - dc).collect();
2165
2166 let analytic = hilbert_transform(&detrended);
2168
2169 let amplitude: Vec<f64> = analytic.iter().map(|c| c.norm()).collect();
2171
2172 let phase: Vec<f64> = analytic.iter().map(|c| c.im.atan2(c.re)).collect();
2173
2174 let unwrapped_phase = unwrap_phase(&phase);
2176
2177 let dt = (argvals[m - 1] - argvals[0]) / (m - 1) as f64;
2179 let mut inst_freq = vec![0.0; m];
2180
2181 if m > 1 {
2183 inst_freq[0] = (unwrapped_phase[1] - unwrapped_phase[0]) / dt / (2.0 * PI);
2184 }
2185 for j in 1..(m - 1) {
2186 inst_freq[j] = (unwrapped_phase[j + 1] - unwrapped_phase[j - 1]) / (2.0 * dt) / (2.0 * PI);
2187 }
2188 if m > 1 {
2189 inst_freq[m - 1] = (unwrapped_phase[m - 1] - unwrapped_phase[m - 2]) / dt / (2.0 * PI);
2190 }
2191
2192 let period: Vec<f64> = inst_freq
2194 .iter()
2195 .map(|&f| {
2196 if f.abs() > 1e-10 {
2197 (1.0 / f).abs()
2198 } else {
2199 f64::INFINITY
2200 }
2201 })
2202 .collect();
2203
2204 InstantaneousPeriod {
2205 period,
2206 frequency: inst_freq,
2207 amplitude,
2208 }
2209}
2210
2211pub fn analyze_peak_timing(
2232 data: &[f64],
2233 n: usize,
2234 m: usize,
2235 argvals: &[f64],
2236 period: f64,
2237 smooth_nbasis: Option<usize>,
2238) -> PeakTimingResult {
2239 if n == 0 || m < 3 || argvals.len() != m || period <= 0.0 {
2240 return PeakTimingResult {
2241 peak_times: Vec::new(),
2242 peak_values: Vec::new(),
2243 normalized_timing: Vec::new(),
2244 mean_timing: f64::NAN,
2245 std_timing: f64::NAN,
2246 range_timing: f64::NAN,
2247 variability_score: f64::NAN,
2248 timing_trend: f64::NAN,
2249 cycle_indices: Vec::new(),
2250 };
2251 }
2252
2253 let min_distance = period * 0.7;
2256 let peaks = detect_peaks(
2257 data,
2258 n,
2259 m,
2260 argvals,
2261 Some(min_distance),
2262 None, true, smooth_nbasis,
2265 );
2266
2267 let sample_peaks = if peaks.peaks.is_empty() {
2270 Vec::new()
2271 } else {
2272 peaks.peaks[0].clone()
2273 };
2274
2275 if sample_peaks.is_empty() {
2276 return PeakTimingResult {
2277 peak_times: Vec::new(),
2278 peak_values: Vec::new(),
2279 normalized_timing: Vec::new(),
2280 mean_timing: f64::NAN,
2281 std_timing: f64::NAN,
2282 range_timing: f64::NAN,
2283 variability_score: f64::NAN,
2284 timing_trend: f64::NAN,
2285 cycle_indices: Vec::new(),
2286 };
2287 }
2288
2289 let peak_times: Vec<f64> = sample_peaks.iter().map(|p| p.time).collect();
2290 let peak_values: Vec<f64> = sample_peaks.iter().map(|p| p.value).collect();
2291
2292 let t_start = argvals[0];
2294 let normalized_timing: Vec<f64> = peak_times
2295 .iter()
2296 .map(|&t| {
2297 let cycle_pos = (t - t_start) % period;
2298 cycle_pos / period
2299 })
2300 .collect();
2301
2302 let cycle_indices: Vec<usize> = peak_times
2304 .iter()
2305 .map(|&t| ((t - t_start) / period).floor() as usize + 1)
2306 .collect();
2307
2308 let n_peaks = normalized_timing.len() as f64;
2310 let mean_timing = normalized_timing.iter().sum::<f64>() / n_peaks;
2311
2312 let variance: f64 = normalized_timing
2313 .iter()
2314 .map(|&x| (x - mean_timing).powi(2))
2315 .sum::<f64>()
2316 / n_peaks;
2317 let std_timing = variance.sqrt();
2318
2319 let min_timing = normalized_timing
2320 .iter()
2321 .cloned()
2322 .fold(f64::INFINITY, f64::min);
2323 let max_timing = normalized_timing
2324 .iter()
2325 .cloned()
2326 .fold(f64::NEG_INFINITY, f64::max);
2327 let range_timing = max_timing - min_timing;
2328
2329 let variability_score = (std_timing / 0.1).min(1.0);
2333
2334 let cycle_idx_f64: Vec<f64> = cycle_indices.iter().map(|&i| i as f64).collect();
2336 let timing_trend = linear_slope(&cycle_idx_f64, &normalized_timing);
2337
2338 PeakTimingResult {
2339 peak_times,
2340 peak_values,
2341 normalized_timing,
2342 mean_timing,
2343 std_timing,
2344 range_timing,
2345 variability_score,
2346 timing_trend,
2347 cycle_indices,
2348 }
2349}
2350
2351pub fn classify_seasonality(
2375 data: &[f64],
2376 n: usize,
2377 m: usize,
2378 argvals: &[f64],
2379 period: f64,
2380 strength_threshold: Option<f64>,
2381 timing_threshold: Option<f64>,
2382) -> SeasonalityClassification {
2383 let strength_thresh = strength_threshold.unwrap_or(0.3);
2384 let timing_thresh = timing_threshold.unwrap_or(0.05);
2385
2386 if n == 0 || m < 4 || argvals.len() != m || period <= 0.0 {
2387 return SeasonalityClassification {
2388 is_seasonal: false,
2389 has_stable_timing: false,
2390 timing_variability: f64::NAN,
2391 seasonal_strength: f64::NAN,
2392 cycle_strengths: Vec::new(),
2393 weak_seasons: Vec::new(),
2394 classification: SeasonalType::NonSeasonal,
2395 peak_timing: None,
2396 };
2397 }
2398
2399 let overall_strength = seasonal_strength_variance(data, n, m, argvals, period, 3);
2401
2402 let (cycle_strengths, weak_seasons) =
2403 compute_cycle_strengths(data, n, m, argvals, period, strength_thresh);
2404 let n_cycles = cycle_strengths.len();
2405
2406 let peak_timing = analyze_peak_timing(data, n, m, argvals, period, None);
2408
2409 let is_seasonal = overall_strength >= strength_thresh;
2411 let has_stable_timing = peak_timing.std_timing <= timing_thresh;
2412 let timing_variability = peak_timing.variability_score;
2413
2414 let n_weak = weak_seasons.len();
2416 let classification = if !is_seasonal {
2417 SeasonalType::NonSeasonal
2418 } else if n_cycles > 0 && n_weak as f64 / n_cycles as f64 > 0.3 {
2419 SeasonalType::IntermittentSeasonal
2421 } else if !has_stable_timing {
2422 SeasonalType::VariableTiming
2423 } else {
2424 SeasonalType::StableSeasonal
2425 };
2426
2427 SeasonalityClassification {
2428 is_seasonal,
2429 has_stable_timing,
2430 timing_variability,
2431 seasonal_strength: overall_strength,
2432 cycle_strengths,
2433 weak_seasons,
2434 classification,
2435 peak_timing: Some(peak_timing),
2436 }
2437}
2438
2439pub fn detect_seasonality_changes_auto(
2453 data: &[f64],
2454 n: usize,
2455 m: usize,
2456 argvals: &[f64],
2457 period: f64,
2458 threshold_method: ThresholdMethod,
2459 window_size: f64,
2460 min_duration: f64,
2461) -> ChangeDetectionResult {
2462 if n == 0 || m < 4 || argvals.len() != m {
2463 return ChangeDetectionResult {
2464 change_points: Vec::new(),
2465 strength_curve: Vec::new(),
2466 };
2467 }
2468
2469 let strength_curve = seasonal_strength_windowed(
2471 data,
2472 n,
2473 m,
2474 argvals,
2475 period,
2476 window_size,
2477 StrengthMethod::Variance,
2478 );
2479
2480 if strength_curve.is_empty() {
2481 return ChangeDetectionResult {
2482 change_points: Vec::new(),
2483 strength_curve: Vec::new(),
2484 };
2485 }
2486
2487 let threshold = match threshold_method {
2489 ThresholdMethod::Fixed(t) => t,
2490 ThresholdMethod::Percentile(p) => {
2491 let mut sorted: Vec<f64> = strength_curve
2492 .iter()
2493 .copied()
2494 .filter(|x| x.is_finite())
2495 .collect();
2496 sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2497 if sorted.is_empty() {
2498 0.5
2499 } else {
2500 let idx = ((p / 100.0) * sorted.len() as f64) as usize;
2501 sorted[idx.min(sorted.len() - 1)]
2502 }
2503 }
2504 ThresholdMethod::Otsu => otsu_threshold(&strength_curve),
2505 };
2506
2507 detect_seasonality_changes(
2509 data,
2510 n,
2511 m,
2512 argvals,
2513 period,
2514 threshold,
2515 window_size,
2516 min_duration,
2517 )
2518}
2519
/// Consensus result of the SAZED ensemble period estimator.
#[derive(Debug, Clone)]
pub struct SazedResult {
    /// Consensus period (NaN when fewer than two components are confident).
    pub period: f64,
    /// Fraction of the five components agreeing with the consensus.
    pub confidence: f64,
    /// Period estimate from each individual component.
    pub component_periods: SazedComponents,
    /// Number of components that agree with the consensus period.
    pub agreeing_components: usize,
}
2532
/// Period estimates from the five SAZED components (NaN when a component
/// could not produce an estimate).
#[derive(Debug, Clone)]
pub struct SazedComponents {
    /// Dominant periodogram peak.
    pub spectral: f64,
    /// First autocorrelation peak.
    pub acf_peak: f64,
    /// Harmonic-weighted average over autocorrelation peaks.
    pub acf_average: f64,
    /// Estimate from ACF zero-crossing spacing.
    pub zero_crossing: f64,
    /// Spectral estimate on the first-differenced signal.
    pub spectral_diff: f64,
}
2547
/// SAZED ensemble period estimation on a single series.
///
/// Runs five independent estimators — spectral peak, ACF peak, ACF weighted
/// average, ACF zero crossings, and spectral on the first difference — keeps
/// those that pass per-component confidence and plausibility bounds, and
/// returns their consensus period.
///
/// `tolerance` is the relative tolerance for two component periods to count
/// as agreeing (default 0.05). Returns a NaN period with zero confidence
/// when the series is too short (`n < 8`), `argvals` mismatches, or fewer
/// than two components are confident.
pub fn sazed(data: &[f64], argvals: &[f64], tolerance: Option<f64>) -> SazedResult {
    let n = data.len();
    let tol = tolerance.unwrap_or(0.05); if n < 8 || argvals.len() != n {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: SazedComponents {
                spectral: f64::NAN,
                acf_peak: f64::NAN,
                acf_average: f64::NAN,
                zero_crossing: f64::NAN,
                spectral_diff: f64::NAN,
            },
            agreeing_components: 0,
        };
    }

    // Average grid spacing (uniform-grid assumption) and ACF depth.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);
    let signal_range = argvals[n - 1] - argvals[0];

    // Plausibility bounds: at least ~3 samples per cycle, and at least
    // 2 full cycles over the observed range.
    let min_period = signal_range / (n as f64 / 3.0);
    let max_period = signal_range / 2.0;

    // Run the five component estimators.
    let (spectral_period, spectral_conf) = sazed_spectral_with_confidence(data, argvals);

    let (acf_peak_period, acf_peak_conf) = sazed_acf_peak_with_confidence(data, dt, max_lag);

    let acf_average_period = sazed_acf_average(data, dt, max_lag);

    let (zero_crossing_period, zero_crossing_conf) =
        sazed_zero_crossing_with_confidence(data, dt, max_lag);

    let (spectral_diff_period, spectral_diff_conf) =
        sazed_spectral_diff_with_confidence(data, argvals);

    let components = SazedComponents {
        spectral: spectral_period,
        acf_peak: acf_peak_period,
        acf_average: acf_average_period,
        zero_crossing: zero_crossing_period,
        spectral_diff: spectral_diff_period,
    };

    // Per-component confidence thresholds. The scales differ by component:
    // peak-to-noise-floor power ratios for the spectral ones, values in
    // [0, 1] (correlation height / spacing consistency) for the ACF ones.
    let spectral_thresh = 8.0; let acf_thresh = 0.3; let zero_crossing_thresh = 0.9; let spectral_diff_thresh = 6.0; let min_confident_components = 2;

    // Keep only components whose period lies in [min_period, max_period]
    // and whose confidence clears its threshold.
    // NOTE(review): acf_average is gated on acf_peak_conf — presumably
    // because it carries no confidence of its own; confirm this is intended.
    let confident_periods: Vec<f64> = [
        validate_sazed_component(
            spectral_period,
            spectral_conf,
            min_period,
            max_period,
            spectral_thresh,
        ),
        validate_sazed_component(
            acf_peak_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            acf_average_period,
            acf_peak_conf,
            min_period,
            max_period,
            acf_thresh,
        ),
        validate_sazed_component(
            zero_crossing_period,
            zero_crossing_conf,
            min_period,
            max_period,
            zero_crossing_thresh,
        ),
        validate_sazed_component(
            spectral_diff_period,
            spectral_diff_conf,
            min_period,
            max_period,
            spectral_diff_thresh,
        ),
    ]
    .into_iter()
    .flatten()
    .collect();

    // Too few survivors to form a consensus.
    if confident_periods.len() < min_confident_components {
        return SazedResult {
            period: f64::NAN,
            confidence: 0.0,
            component_periods: components,
            agreeing_components: confident_periods.len(),
        };
    }

    // Consensus period among the confident components; confidence is the
    // fraction of all five components that agree.
    let (consensus_period, agreeing_count) = find_consensus_period(&confident_periods, tol);
    let confidence = agreeing_count as f64 / 5.0;

    SazedResult {
        period: consensus_period,
        confidence,
        component_periods: components,
        agreeing_components: agreeing_count,
    }
}
2695
2696pub fn sazed_fdata(
2707 data: &[f64],
2708 n: usize,
2709 m: usize,
2710 argvals: &[f64],
2711 tolerance: Option<f64>,
2712) -> SazedResult {
2713 if n == 0 || m < 8 || argvals.len() != m {
2714 return SazedResult {
2715 period: f64::NAN,
2716 confidence: 0.0,
2717 component_periods: SazedComponents {
2718 spectral: f64::NAN,
2719 acf_peak: f64::NAN,
2720 acf_average: f64::NAN,
2721 zero_crossing: f64::NAN,
2722 spectral_diff: f64::NAN,
2723 },
2724 agreeing_components: 0,
2725 };
2726 }
2727
2728 let mean_curve = compute_mean_curve(data, n, m);
2729 sazed(&mean_curve, argvals, tolerance)
2730}
2731
2732fn sazed_spectral_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2734 let (frequencies, power) = periodogram(data, argvals);
2735
2736 if frequencies.len() < 3 {
2737 return (f64::NAN, 0.0);
2738 }
2739
2740 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
2742
2743 if power_no_dc.is_empty() {
2744 return (f64::NAN, 0.0);
2745 }
2746
2747 let mut sorted_power = power_no_dc.clone();
2749 sorted_power.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2750 let noise_floor = sorted_power[sorted_power.len() / 2].max(1e-15);
2751
2752 let mut max_idx = 0;
2754 let mut max_val = 0.0;
2755 for (i, &p) in power_no_dc.iter().enumerate() {
2756 if p > max_val {
2757 max_val = p;
2758 max_idx = i;
2759 }
2760 }
2761
2762 let power_ratio = max_val / noise_floor;
2763 let freq = frequencies[max_idx + 1];
2764
2765 if freq > 1e-15 {
2766 (1.0 / freq, power_ratio)
2767 } else {
2768 (f64::NAN, 0.0)
2769 }
2770}
2771
2772fn sazed_acf_peak_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2774 let acf = autocorrelation(data, max_lag);
2775
2776 match find_first_acf_peak(&acf) {
2777 Some((peak_lag, acf_value)) => (peak_lag as f64 * dt, acf_value),
2778 None => (f64::NAN, 0.0),
2779 }
2780}
2781
2782fn sazed_acf_average(data: &[f64], dt: f64, max_lag: usize) -> f64 {
2784 let acf = autocorrelation(data, max_lag);
2785
2786 if acf.len() < 4 {
2787 return f64::NAN;
2788 }
2789
2790 let peaks = find_peaks_1d(&acf[1..], 1);
2792
2793 if peaks.is_empty() {
2794 return f64::NAN;
2795 }
2796
2797 let mut weighted_sum = 0.0;
2799 let mut weight_sum = 0.0;
2800
2801 for (i, &peak_idx) in peaks.iter().enumerate() {
2802 let lag = peak_idx + 1;
2803 let weight = acf[lag].max(0.0);
2804
2805 if i == 0 {
2806 weighted_sum += lag as f64 * weight;
2808 weight_sum += weight;
2809 } else {
2810 let expected_fundamental = peaks[0] + 1;
2812 let harmonic = ((lag as f64 / expected_fundamental as f64) + 0.5) as usize;
2813 if harmonic > 0 {
2814 let fundamental_est = lag as f64 / harmonic as f64;
2815 weighted_sum += fundamental_est * weight;
2816 weight_sum += weight;
2817 }
2818 }
2819 }
2820
2821 if weight_sum > 1e-15 {
2822 weighted_sum / weight_sum * dt
2823 } else {
2824 f64::NAN
2825 }
2826}
2827
2828fn sazed_zero_crossing_with_confidence(data: &[f64], dt: f64, max_lag: usize) -> (f64, f64) {
2831 let acf = autocorrelation(data, max_lag);
2832
2833 if acf.len() < 4 {
2834 return (f64::NAN, 0.0);
2835 }
2836
2837 let mut crossings = Vec::new();
2839 for i in 1..acf.len() {
2840 if acf[i - 1] * acf[i] < 0.0 {
2841 let frac = acf[i - 1].abs() / (acf[i - 1].abs() + acf[i].abs());
2843 crossings.push((i - 1) as f64 + frac);
2844 }
2845 }
2846
2847 if crossings.len() < 2 {
2848 return (f64::NAN, 0.0);
2849 }
2850
2851 let mut half_periods = Vec::new();
2854 for i in 1..crossings.len() {
2855 half_periods.push(crossings[i] - crossings[i - 1]);
2856 }
2857
2858 if half_periods.is_empty() {
2859 return (f64::NAN, 0.0);
2860 }
2861
2862 let mean: f64 = half_periods.iter().sum::<f64>() / half_periods.len() as f64;
2865 let variance: f64 =
2866 half_periods.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / half_periods.len() as f64;
2867 let std = variance.sqrt();
2868 let consistency = (1.0 - std / mean.max(1e-15)).clamp(0.0, 1.0);
2869
2870 half_periods.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
2872 let median_half = half_periods[half_periods.len() / 2];
2873
2874 (2.0 * median_half * dt, consistency)
2875}
2876
2877fn sazed_spectral_diff_with_confidence(data: &[f64], argvals: &[f64]) -> (f64, f64) {
2879 if data.len() < 4 {
2880 return (f64::NAN, 0.0);
2881 }
2882
2883 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
2885 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
2886
2887 sazed_spectral_with_confidence(&diff, &diff_argvals)
2888}
2889
// Indices of interior local maxima of `power` that exceed twice the median
// power (a crude noise-floor threshold), ordered strongest first.
fn find_spectral_peaks(power: &[f64]) -> Vec<usize> {
    // Need at least one interior point for a local maximum to exist.
    if power.len() < 3 {
        return Vec::new();
    }

    // Threshold = 2 x median power.
    let mut sorted: Vec<f64> = power.to_vec();
    sorted.sort_by(|a, b| a.partial_cmp(b).unwrap_or(std::cmp::Ordering::Equal));
    let threshold = sorted[sorted.len() / 2] * 2.0;

    // Interior local maxima above the threshold.
    let mut peaks: Vec<(usize, f64)> = (1..power.len() - 1)
        .filter(|&i| power[i] > power[i - 1] && power[i] > power[i + 1] && power[i] > threshold)
        .map(|i| (i, power[i]))
        .collect();

    // Strongest first.
    peaks.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));

    peaks.into_iter().map(|(idx, _)| idx).collect()
}
2915
2916fn find_consensus_period(periods: &[f64], tolerance: f64) -> (f64, usize) {
2918 if periods.is_empty() {
2919 return (f64::NAN, 0);
2920 }
2921 if periods.len() == 1 {
2922 return (periods[0], 1);
2923 }
2924
2925 let mut best_period = periods[0];
2926 let mut best_count = 0;
2927 let mut best_sum = 0.0;
2928
2929 for &p1 in periods {
2930 let (count, sum) = count_agreeing_periods(periods, p1, tolerance);
2931
2932 if count > best_count
2933 || (count == best_count && sum / count as f64 > best_sum / best_count.max(1) as f64)
2934 {
2935 best_count = count;
2936 best_period = sum / count as f64;
2937 best_sum = sum;
2938 }
2939 }
2940
2941 (best_period, best_count)
2942}
2943
/// Result of FFT + ACF `autoperiod` detection.
#[derive(Debug, Clone)]
pub struct AutoperiodResult {
    /// Best detected period (NaN when detection failed).
    pub period: f64,
    /// Combined FFT x ACF score of the best candidate.
    pub confidence: f64,
    /// Raw periodogram power at the best candidate's frequency.
    pub fft_power: f64,
    /// ACF validation score of the best candidate's period.
    pub acf_validation: f64,
    /// All evaluated candidates.
    pub candidates: Vec<AutoperiodCandidate>,
}
2958
/// One spectral-peak candidate evaluated by `autoperiod`.
#[derive(Debug, Clone)]
pub struct AutoperiodCandidate {
    /// Gradient-refined candidate period.
    pub period: f64,
    /// Raw periodogram power at the candidate's frequency.
    pub fft_power: f64,
    /// ACF validation score of the refined period.
    pub acf_score: f64,
    /// Normalized power times ACF score; used to rank candidates.
    pub combined_score: f64,
}
2971
// Failure value shared by all `autoperiod` early exits: NaN period, zero
// scores, no candidates.
fn empty_autoperiod_result() -> AutoperiodResult {
    AutoperiodResult {
        period: f64::NAN,
        confidence: 0.0,
        fft_power: 0.0,
        acf_validation: 0.0,
        candidates: Vec::new(),
    }
}
2981
2982fn build_autoperiod_candidate(
2984 peak_idx: usize,
2985 frequencies: &[f64],
2986 power_no_dc: &[f64],
2987 acf: &[f64],
2988 dt: f64,
2989 steps: usize,
2990 total_power: f64,
2991) -> Option<AutoperiodCandidate> {
2992 let freq = frequencies[peak_idx + 1];
2993 if freq < 1e-15 {
2994 return None;
2995 }
2996 let fft_power = power_no_dc[peak_idx];
2997 let normalized_power = fft_power / total_power.max(1e-15);
2998 let refined_period = refine_period_gradient(acf, 1.0 / freq, dt, steps);
2999 let refined_acf_score = validate_period_acf(acf, refined_period, dt);
3000 Some(AutoperiodCandidate {
3001 period: refined_period,
3002 fft_power,
3003 acf_score: refined_acf_score,
3004 combined_score: normalized_power * refined_acf_score,
3005 })
3006}
3007
/// Autoperiod-style detection: spectral peaks propose candidate periods and
/// the autocorrelation function validates and refines them.
///
/// `n_candidates` caps how many spectral peaks are evaluated (default 5);
/// `gradient_steps` controls the ACF-based refinement (default 10). Returns
/// the NaN failure value from `empty_autoperiod_result` for short or
/// mismatched input or when no usable spectral peak exists.
pub fn autoperiod(
    data: &[f64],
    argvals: &[f64],
    n_candidates: Option<usize>,
    gradient_steps: Option<usize>,
) -> AutoperiodResult {
    let n = data.len();
    let max_candidates = n_candidates.unwrap_or(5);
    let steps = gradient_steps.unwrap_or(10);

    if n < 8 || argvals.len() != n {
        return empty_autoperiod_result();
    }

    // Average grid spacing (uniform-grid assumption) and ACF depth.
    let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
    let max_lag = (n / 2).max(4);

    let (frequencies, power) = periodogram(data, argvals);

    if frequencies.len() < 3 {
        return empty_autoperiod_result();
    }

    // Candidate frequencies = spectral peaks, excluding the DC bin.
    let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
    let peak_indices = find_spectral_peaks(&power_no_dc);

    if peak_indices.is_empty() {
        return empty_autoperiod_result();
    }

    let acf = autocorrelation(data, max_lag);

    // Build up to `max_candidates` candidates, refining each period against
    // the ACF (zero-frequency peaks are dropped by the builder).
    let total_power: f64 = power_no_dc.iter().sum();
    let candidates: Vec<AutoperiodCandidate> = peak_indices
        .iter()
        .take(max_candidates)
        .filter_map(|&peak_idx| {
            build_autoperiod_candidate(
                peak_idx,
                &frequencies,
                &power_no_dc,
                &acf,
                dt,
                steps,
                total_power,
            )
        })
        .collect();

    if candidates.is_empty() {
        return empty_autoperiod_result();
    }

    // Winner = highest combined (power x ACF) score.
    let best = candidates
        .iter()
        .max_by(|a, b| {
            a.combined_score
                .partial_cmp(&b.combined_score)
                .unwrap_or(std::cmp::Ordering::Equal)
        })
        .unwrap();

    AutoperiodResult {
        period: best.period,
        confidence: best.combined_score,
        fft_power: best.fft_power,
        acf_validation: best.acf_score,
        candidates,
    }
}
3103
3104pub fn autoperiod_fdata(
3106 data: &[f64],
3107 n: usize,
3108 m: usize,
3109 argvals: &[f64],
3110 n_candidates: Option<usize>,
3111 gradient_steps: Option<usize>,
3112) -> AutoperiodResult {
3113 if n == 0 || m < 8 || argvals.len() != m {
3114 return AutoperiodResult {
3115 period: f64::NAN,
3116 confidence: 0.0,
3117 fft_power: 0.0,
3118 acf_validation: 0.0,
3119 candidates: Vec::new(),
3120 };
3121 }
3122
3123 let mean_curve = compute_mean_curve(data, n, m);
3124 autoperiod(&mean_curve, argvals, n_candidates, gradient_steps)
3125}
3126
// Scores a candidate period against the ACF. Base score is the (clamped
// non-negative) ACF at the nearest integer lag; a comparably strong ACF at
// half the lag halves the score (true period likely half this one), and a
// strong ACF at double the lag boosts it by 20%. Capped at 1.0.
fn validate_period_acf(acf: &[f64], period: f64, dt: f64) -> f64 {
    // Nearest integer lag for this period on the sampling grid.
    let lag = (period / dt).round() as usize;
    if lag == 0 || lag >= acf.len() {
        return 0.0;
    }

    let acf_at_lag = acf[lag];
    let mut score = acf_at_lag.max(0.0);

    // Penalty: the half-lag correlates almost as strongly.
    let half_lag = lag / 2;
    if half_lag > 0 && half_lag < acf.len() && acf[half_lag] > acf_at_lag * 0.7 {
        score *= 0.5;
    }

    // Boost: a clear repeat at twice the lag corroborates the period.
    let double_lag = lag * 2;
    if double_lag < acf.len() && acf[double_lag] > 0.3 {
        score *= 1.2;
    }

    score.min(1.0)
}
3165
3166fn refine_period_gradient(acf: &[f64], initial_period: f64, dt: f64, steps: usize) -> f64 {
3168 let mut period = initial_period;
3169 let step_size = dt * 0.5; for _ in 0..steps {
3172 let current_score = validate_period_acf(acf, period, dt);
3173 let left_score = validate_period_acf(acf, period - step_size, dt);
3174 let right_score = validate_period_acf(acf, period + step_size, dt);
3175
3176 if left_score > current_score && left_score > right_score {
3177 period -= step_size;
3178 } else if right_score > current_score {
3179 period += step_size;
3180 }
3181 }
3183
3184 period.max(dt) }
3186
/// Result of CFD-Autoperiod detection (spectral candidates clustered,
/// then validated against the autocorrelation function).
#[derive(Debug, Clone)]
pub struct CfdAutoperiodResult {
    /// Best detected period (NaN when nothing was detected).
    pub period: f64,
    /// Confidence of the best period (ACF score × normalized spectral power).
    pub confidence: f64,
    /// ACF validation score of the best period.
    pub acf_validation: f64,
    /// All detected periods, ranked best-first.
    pub periods: Vec<f64>,
    /// Confidence for each entry of `periods` (same ordering).
    pub confidences: Vec<f64>,
}
3201
/// Turn spectral peak indices into `(period, normalized power)` candidates.
///
/// `peak_indices` index into `power_no_dc` (the DC-stripped power array),
/// so the matching frequency is at `peak_idx + 1`. Peaks at effectively
/// zero frequency are dropped.
fn generate_cfd_candidates(
    frequencies: &[f64],
    power_no_dc: &[f64],
    peak_indices: &[usize],
) -> Vec<(f64, f64)> {
    let total_power: f64 = power_no_dc.iter().sum();
    let denom = total_power.max(1e-15);

    let mut candidates = Vec::with_capacity(peak_indices.len());
    for &idx in peak_indices {
        let freq = frequencies[idx + 1];
        if freq > 1e-15 {
            candidates.push((freq.recip(), power_no_dc[idx] / denom));
        }
    }
    candidates
}
3223
3224fn validate_cfd_candidates(clusters: &[(f64, f64)], acf: &[f64], dt: f64) -> Vec<(f64, f64, f64)> {
3226 clusters
3227 .iter()
3228 .filter_map(|&(center, power_sum)| {
3229 let acf_score = validate_period_acf(acf, center, dt);
3230 if acf_score > 0.1 {
3231 Some((center, acf_score, power_sum))
3232 } else {
3233 None
3234 }
3235 })
3236 .collect()
3237}
3238
3239fn validate_or_fallback_cfd(
3241 validated: Vec<(f64, f64, f64)>,
3242 candidates: &[(f64, f64)],
3243 tol: f64,
3244 min_size: usize,
3245) -> Vec<(f64, f64, f64)> {
3246 if !validated.is_empty() {
3247 return validated;
3248 }
3249 cluster_periods(candidates, tol, min_size)
3251 .into_iter()
3252 .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
3253 .map(|(center, power_sum)| vec![(center, 0.0, power_sum)])
3254 .unwrap_or_default()
3255}
3256
/// Rank validated `(period, acf_score, power)` triples by combined score
/// (ACF × power, descending) and return `(periods, confidences, top acf)`.
///
/// Previously this indexed `sorted[0]` and panicked when `validated` was
/// empty (which `validate_or_fallback_cfd` can legitimately produce);
/// an empty input now yields empty vectors and a 0.0 ACF score.
fn rank_cfd_results(validated: &[(f64, f64, f64)]) -> (Vec<f64>, Vec<f64>, f64) {
    if validated.is_empty() {
        return (Vec::new(), Vec::new(), 0.0);
    }

    let mut sorted: Vec<_> = validated.to_vec();
    sorted.sort_by(|a, b| {
        (b.1 * b.2)
            .partial_cmp(&(a.1 * a.2))
            .unwrap_or(std::cmp::Ordering::Equal)
    });

    let top_acf = sorted[0].1;
    let periods = sorted.iter().map(|v| v.0).collect();
    let confidences = sorted.iter().map(|v| v.1 * v.2).collect();
    (periods, confidences, top_acf)
}
3271
/// A `CfdAutoperiodResult` that represents "no period detected":
/// NaN period, zero scores, and empty candidate lists.
fn empty_cfd_result() -> CfdAutoperiodResult {
    CfdAutoperiodResult {
        period: f64::NAN,
        confidence: 0.0,
        acf_validation: 0.0,
        periods: Vec::new(),
        confidences: Vec::new(),
    }
}
3281
3282fn extract_cfd_spectral_candidates(data: &[f64], argvals: &[f64]) -> Option<Vec<(f64, f64)>> {
3284 let diff: Vec<f64> = data.windows(2).map(|w| w[1] - w[0]).collect();
3285 let diff_argvals: Vec<f64> = argvals.windows(2).map(|w| (w[0] + w[1]) / 2.0).collect();
3286 let (frequencies, power) = periodogram(&diff, &diff_argvals);
3287 if frequencies.len() < 3 {
3288 return None;
3289 }
3290 let power_no_dc: Vec<f64> = power.iter().skip(1).copied().collect();
3291 let peak_indices = find_spectral_peaks(&power_no_dc);
3292 if peak_indices.is_empty() {
3293 return None;
3294 }
3295 let candidates = generate_cfd_candidates(&frequencies, &power_no_dc, &peak_indices);
3296 if candidates.is_empty() {
3297 None
3298 } else {
3299 Some(candidates)
3300 }
3301}
3302
3303pub fn cfd_autoperiod(
3324 data: &[f64],
3325 argvals: &[f64],
3326 cluster_tolerance: Option<f64>,
3327 min_cluster_size: Option<usize>,
3328) -> CfdAutoperiodResult {
3329 let n = data.len();
3330 let tol = cluster_tolerance.unwrap_or(0.1);
3331 let min_size = min_cluster_size.unwrap_or(1);
3332
3333 if n < 8 || argvals.len() != n {
3334 return empty_cfd_result();
3335 }
3336
3337 let dt = (argvals[n - 1] - argvals[0]) / (n - 1) as f64;
3338 let max_lag = (n / 2).max(4);
3339
3340 let Some(candidates) = extract_cfd_spectral_candidates(data, argvals) else {
3341 return empty_cfd_result();
3342 };
3343
3344 let clusters = cluster_periods(&candidates, tol, min_size);
3345 if clusters.is_empty() {
3346 return empty_cfd_result();
3347 }
3348
3349 let acf = autocorrelation(data, max_lag);
3350 let validated = validate_cfd_candidates(&clusters, &acf, dt);
3351 let validated = validate_or_fallback_cfd(validated, &candidates, tol, min_size);
3352 let (periods, confidences, top_acf) = rank_cfd_results(&validated);
3353
3354 CfdAutoperiodResult {
3355 period: periods[0],
3356 confidence: confidences[0],
3357 acf_validation: top_acf,
3358 periods,
3359 confidences,
3360 }
3361}
3362
3363pub fn cfd_autoperiod_fdata(
3365 data: &[f64],
3366 n: usize,
3367 m: usize,
3368 argvals: &[f64],
3369 cluster_tolerance: Option<f64>,
3370 min_cluster_size: Option<usize>,
3371) -> CfdAutoperiodResult {
3372 if n == 0 || m < 8 || argvals.len() != m {
3373 return CfdAutoperiodResult {
3374 period: f64::NAN,
3375 confidence: 0.0,
3376 acf_validation: 0.0,
3377 periods: Vec::new(),
3378 confidences: Vec::new(),
3379 };
3380 }
3381
3382 let mean_curve = compute_mean_curve(data, n, m);
3383 cfd_autoperiod(&mean_curve, argvals, cluster_tolerance, min_cluster_size)
3384}
3385
/// Greedy 1-D clustering of `(period, power)` candidates.
///
/// Candidates are sorted by period; each one joins the current cluster when
/// its relative distance to the running cluster mean is within `tolerance`,
/// otherwise the cluster is finalized and a new one starts. Finalized
/// clusters report their power-weighted center and total power; clusters
/// smaller than `min_size` are discarded.
///
/// The previously duplicated finalization code (mid-loop and at the end)
/// is factored into `finalize_cluster`.
fn cluster_periods(candidates: &[(f64, f64)], tolerance: f64, min_size: usize) -> Vec<(f64, f64)> {
    if candidates.is_empty() {
        return Vec::new();
    }

    let mut sorted: Vec<(f64, f64)> = candidates.to_vec();
    sorted.sort_by(|a, b| a.0.partial_cmp(&b.0).unwrap_or(std::cmp::Ordering::Equal));

    let mut clusters: Vec<(f64, f64)> = Vec::new();
    let mut current: Vec<(f64, f64)> = vec![sorted[0]];

    for &(period, power) in sorted.iter().skip(1) {
        // Unweighted running mean decides membership.
        let cluster_center =
            current.iter().map(|(p, _)| p).sum::<f64>() / current.len() as f64;
        let rel_diff = (period - cluster_center).abs() / cluster_center.max(period);

        if rel_diff <= tolerance {
            current.push((period, power));
        } else {
            finalize_cluster(&current, min_size, &mut clusters);
            current = vec![(period, power)];
        }
    }

    finalize_cluster(&current, min_size, &mut clusters);
    clusters
}

/// Push the power-weighted center and total power of `cluster` onto
/// `clusters`, unless the cluster is smaller than `min_size`.
fn finalize_cluster(cluster: &[(f64, f64)], min_size: usize, clusters: &mut Vec<(f64, f64)>) {
    if cluster.len() < min_size {
        return;
    }
    let total_power: f64 = cluster.iter().map(|(_, pw)| pw).sum();
    let center = cluster.iter().map(|(p, pw)| p * pw).sum::<f64>() / total_power.max(1e-15);
    clusters.push((center, total_power));
}
3438
/// Result of a Lomb–Scargle periodogram evaluation.
#[derive(Debug, Clone)]
pub struct LombScargleResult {
    /// Frequency grid the power was evaluated on.
    pub frequencies: Vec<f64>,
    /// `1 / frequency` per grid point (infinity at zero frequency).
    pub periods: Vec<f64>,
    /// Normalized Lomb–Scargle power per frequency.
    pub power: Vec<f64>,
    /// Period at the power maximum.
    pub peak_period: f64,
    /// Frequency at the power maximum.
    pub peak_frequency: f64,
    /// Power at the maximum.
    pub peak_power: f64,
    /// False alarm probability of the peak.
    pub false_alarm_probability: f64,
    /// `1 - false_alarm_probability`.
    pub significance: f64,
}
3463
3464pub fn lomb_scargle(
3504 times: &[f64],
3505 values: &[f64],
3506 frequencies: Option<&[f64]>,
3507 oversampling: Option<f64>,
3508 nyquist_factor: Option<f64>,
3509) -> LombScargleResult {
3510 let n = times.len();
3511 assert_eq!(n, values.len(), "times and values must have same length");
3512 assert!(n >= 3, "Need at least 3 data points");
3513
3514 let mean_y: f64 = values.iter().sum::<f64>() / n as f64;
3516 let var_y: f64 = values.iter().map(|&y| (y - mean_y).powi(2)).sum::<f64>() / (n - 1) as f64;
3517
3518 let freq_vec: Vec<f64>;
3520 let freqs = if let Some(f) = frequencies {
3521 f
3522 } else {
3523 freq_vec = generate_ls_frequencies(
3524 times,
3525 oversampling.unwrap_or(4.0),
3526 nyquist_factor.unwrap_or(1.0),
3527 );
3528 &freq_vec
3529 };
3530
3531 let mut power = Vec::with_capacity(freqs.len());
3533
3534 for &freq in freqs.iter() {
3535 let omega = 2.0 * PI * freq;
3536 let p = lomb_scargle_single_freq(times, values, mean_y, var_y, omega);
3537 power.push(p);
3538 }
3539
3540 let (peak_idx, &peak_power) = power
3542 .iter()
3543 .enumerate()
3544 .max_by(|(_, a), (_, b)| a.partial_cmp(b).unwrap())
3545 .unwrap_or((0, &0.0));
3546
3547 let peak_frequency = freqs.get(peak_idx).copied().unwrap_or(0.0);
3548 let peak_period = if peak_frequency > 0.0 {
3549 1.0 / peak_frequency
3550 } else {
3551 f64::INFINITY
3552 };
3553
3554 let n_indep = estimate_independent_frequencies(times, freqs.len());
3556 let fap = lomb_scargle_fap(peak_power, n_indep, n);
3557
3558 let periods: Vec<f64> = freqs
3560 .iter()
3561 .map(|&f| if f > 0.0 { 1.0 / f } else { f64::INFINITY })
3562 .collect();
3563
3564 LombScargleResult {
3565 frequencies: freqs.to_vec(),
3566 periods,
3567 power,
3568 peak_period,
3569 peak_frequency,
3570 peak_power,
3571 false_alarm_probability: fap,
3572 significance: 1.0 - fap,
3573 }
3574}
3575
/// Lomb–Scargle power at one angular frequency `omega`, normalized by the
/// sample variance `var_y`. Returns 0.0 for non-positive variance or omega.
fn lomb_scargle_single_freq(
    times: &[f64],
    values: &[f64],
    mean_y: f64,
    var_y: f64,
    omega: f64,
) -> f64 {
    if var_y <= 0.0 || omega <= 0.0 {
        return 0.0;
    }

    // Time offset tau that makes the sine and cosine terms orthogonal.
    let (sin_sum, cos_sum) = times.iter().fold((0.0, 0.0), |(s, c), &t| {
        let arg = 2.0 * omega * t;
        (s + arg.sin(), c + arg.cos())
    });
    let tau = sin_sum.atan2(cos_sum) / (2.0 * omega);

    let (mut sc, mut ss, mut css, mut sss) = (0.0, 0.0, 0.0, 0.0);
    for (&t, &y) in times.iter().zip(values.iter()) {
        let arg = omega * (t - tau);
        let (s, c) = arg.sin_cos();
        let dy = y - mean_y;
        sc += dy * c;
        ss += dy * s;
        css += c * c;
        sss += s * s;
    }

    // Guard the denominators against degenerate sampling.
    0.5 * (sc * sc / css.max(1e-15) + ss * ss / sss.max(1e-15)) / var_y
}
3627
/// Build the default Lomb–Scargle frequency grid:
/// from `1 / span` up to `nyquist_factor` times the pseudo-Nyquist
/// frequency, with spacing `f_min / oversampling`, capped at 10 000 points.
/// Fewer than two samples yield the degenerate grid `[0.0]`.
fn generate_ls_frequencies(times: &[f64], oversampling: f64, nyquist_factor: f64) -> Vec<f64> {
    if times.len() < 2 {
        return vec![0.0];
    }

    let (mut t_min, mut t_max) = (f64::INFINITY, f64::NEG_INFINITY);
    for &t in times {
        t_min = t_min.min(t);
        t_max = t_max.max(t);
    }
    let t_span = (t_max - t_min).max(1e-10);

    // Lowest resolvable frequency: one cycle over the observation span.
    let f_min = 1.0 / t_span;
    // Pseudo-Nyquist for uneven sampling based on the mean spacing.
    let f_nyquist = 0.5 * (times.len() - 1) as f64 / t_span;
    let f_max = f_nyquist * nyquist_factor;
    let df = f_min / oversampling;

    let count = (((f_max - f_min) / df).ceil() as usize + 1).min(10000);
    (0..count).map(|i| f_min + i as f64 * df).collect()
}
3661
/// Conservative count of independent frequencies: never more than the
/// number of samples or the number of grid points.
fn estimate_independent_frequencies(times: &[f64], n_freq: usize) -> usize {
    times.len().min(n_freq)
}
3671
/// False alarm probability for a peak of the given normalized power,
/// assuming an exponential null distribution and `n_indep` independent
/// trials: `FAP = 1 - (1 - e^{-power})^{n_indep}`, computed in log space
/// to stay numerically stable for large trial counts.
fn lomb_scargle_fap(power: f64, n_indep: usize, _n_data: usize) -> f64 {
    if power <= 0.0 || n_indep == 0 {
        return 1.0;
    }

    // P(a single trial exceeds `power`).
    let single = 1.0 - (-power).exp();
    if single >= 1.0 {
        return 0.0;
    }
    if single <= 0.0 {
        return 1.0;
    }

    let log_cdf = n_indep as f64 * single.ln();
    // Below exp's underflow range the survival probability is effectively 0.
    if log_cdf < -700.0 {
        0.0
    } else {
        1.0 - log_cdf.exp()
    }
}
3707
3708pub fn lomb_scargle_fdata(
3724 data: &[f64],
3725 n: usize,
3726 m: usize,
3727 argvals: &[f64],
3728 oversampling: Option<f64>,
3729 nyquist_factor: Option<f64>,
3730) -> LombScargleResult {
3731 let mean_curve = compute_mean_curve(data, n, m);
3733
3734 lomb_scargle(argvals, &mean_curve, None, oversampling, nyquist_factor)
3736}
3737
/// Result of a matrix-profile computation plus arc-based period analysis.
#[derive(Debug, Clone)]
pub struct MatrixProfileResult {
    /// Z-normalized Euclidean distance to each subsequence's nearest neighbor.
    pub profile: Vec<f64>,
    /// Index of each subsequence's nearest neighbor.
    pub profile_index: Vec<usize>,
    /// Subsequence (window) length used.
    pub subsequence_length: usize,
    /// Candidate periods from arc-count peaks (up to 5, strongest first).
    pub detected_periods: Vec<f64>,
    /// Histogram of |i − nearest_neighbor(i)| distances.
    pub arc_counts: Vec<usize>,
    /// Strongest detected period (0.0 when none).
    pub primary_period: f64,
    /// Fraction of arcs supporting the primary period, capped at 1.0.
    pub confidence: f64,
}
3760
3761pub fn matrix_profile(
3797 values: &[f64],
3798 subsequence_length: Option<usize>,
3799 exclusion_zone: Option<f64>,
3800) -> MatrixProfileResult {
3801 let n = values.len();
3802
3803 let m = subsequence_length.unwrap_or_else(|| {
3805 let default_m = n / 4;
3806 default_m.max(4).min(n / 2)
3807 });
3808
3809 assert!(m >= 3, "Subsequence length must be at least 3");
3810 assert!(
3811 m <= n / 2,
3812 "Subsequence length must be at most half the series length"
3813 );
3814
3815 let exclusion_zone = exclusion_zone.unwrap_or(0.5);
3816 let exclusion_radius = (m as f64 * exclusion_zone).ceil() as usize;
3817
3818 let profile_len = n - m + 1;
3820
3821 let (means, stds) = compute_sliding_stats(values, m);
3823
3824 let (profile, profile_index) = stomp_core(values, m, &means, &stds, exclusion_radius);
3826
3827 let (arc_counts, detected_periods, primary_period, confidence) =
3829 analyze_arcs(&profile_index, profile_len, m);
3830
3831 MatrixProfileResult {
3832 profile,
3833 profile_index,
3834 subsequence_length: m,
3835 detected_periods,
3836 arc_counts,
3837 primary_period,
3838 confidence,
3839 }
3840}
3841
/// Sliding-window mean and standard deviation (population variance) for
/// every length-`m` window, computed in O(n) via prefix sums.
/// Standard deviations are floored at 1e-10 so callers can divide safely.
fn compute_sliding_stats(values: &[f64], m: usize) -> (Vec<f64>, Vec<f64>) {
    let n = values.len();
    let windows = n - m + 1;

    // Prefix sums of x and x² give O(1) statistics per window.
    let mut acc = vec![0.0; n + 1];
    let mut acc_sq = vec![0.0; n + 1];
    for (i, &x) in values.iter().enumerate() {
        acc[i + 1] = acc[i] + x;
        acc_sq[i + 1] = acc_sq[i] + x * x;
    }

    let len = m as f64;
    let mut means = Vec::with_capacity(windows);
    let mut stds = Vec::with_capacity(windows);

    for start in 0..windows {
        let mean = (acc[start + m] - acc[start]) / len;
        let variance = (acc_sq[start + m] - acc_sq[start]) / len - mean * mean;
        means.push(mean);
        // max(0.0) guards tiny negative variance from cancellation.
        stds.push(variance.max(0.0).sqrt().max(1e-10));
    }

    (means, stds)
}
3878
3879fn stomp_core(
3883 values: &[f64],
3884 m: usize,
3885 means: &[f64],
3886 stds: &[f64],
3887 exclusion_radius: usize,
3888) -> (Vec<f64>, Vec<usize>) {
3889 let n = values.len();
3890 let profile_len = n - m + 1;
3891
3892 let mut profile = vec![f64::INFINITY; profile_len];
3894 let mut profile_index = vec![0usize; profile_len];
3895
3896 let mut qt = vec![0.0; profile_len];
3899
3900 for j in 0..profile_len {
3902 let mut dot = 0.0;
3903 for k in 0..m {
3904 dot += values[k] * values[j + k];
3905 }
3906 qt[j] = dot;
3907 }
3908
3909 update_profile_row(
3911 0,
3912 &qt,
3913 means,
3914 stds,
3915 m,
3916 exclusion_radius,
3917 &mut profile,
3918 &mut profile_index,
3919 );
3920
3921 for i in 1..profile_len {
3923 let mut qt_new = vec![0.0; profile_len];
3926
3927 let mut dot = 0.0;
3929 for k in 0..m {
3930 dot += values[i + k] * values[k];
3931 }
3932 qt_new[0] = dot;
3933
3934 for j in 1..profile_len {
3936 qt_new[j] =
3937 qt[j - 1] - values[i - 1] * values[j - 1] + values[i + m - 1] * values[j + m - 1];
3938 }
3939
3940 qt = qt_new;
3941
3942 update_profile_row(
3944 i,
3945 &qt,
3946 means,
3947 stds,
3948 m,
3949 exclusion_radius,
3950 &mut profile,
3951 &mut profile_index,
3952 );
3953 }
3954
3955 (profile, profile_index)
3956}
3957
/// Fold row `i`'s QT values into the running matrix profile.
///
/// For every column `j` outside the exclusion zone, the Pearson
/// correlation is derived from the dot product and window statistics,
/// converted to z-normalized Euclidean distance, and used to update the
/// nearest-neighbor entries of both `i` and `j`.
fn update_profile_row(
    i: usize,
    qt: &[f64],
    means: &[f64],
    stds: &[f64],
    m: usize,
    exclusion_radius: usize,
    profile: &mut [f64],
    profile_index: &mut [usize],
) {
    let m_f = m as f64;

    for j in 0..profile.len() {
        // Skip trivial matches near the diagonal.
        if i.abs_diff(j) <= exclusion_radius {
            continue;
        }

        let denominator = m_f * stds[i] * stds[j];
        let rho = if denominator > 0.0 {
            ((qt[j] - m_f * means[i] * means[j]) / denominator).clamp(-1.0, 1.0)
        } else {
            0.0
        };

        // z-normalized Euclidean distance from the correlation.
        let dist = (2.0 * m_f * (1.0 - rho)).max(0.0).sqrt();

        if dist < profile[i] {
            profile[i] = dist;
            profile_index[i] = j;
        }
        if dist < profile[j] {
            profile[j] = dist;
            profile_index[j] = i;
        }
    }
}
4005
/// Derive candidate periods from nearest-neighbor arc lengths.
///
/// Builds a histogram of |i − nearest_neighbor(i)| distances, finds local
/// maxima with at least 3 arcs above the minimum plausible period
/// (`m / 2`), and reports up to 5 candidates plus the strongest one with
/// a confidence equal to its share of all long-range arcs.
fn analyze_arcs(
    profile_index: &[usize],
    profile_len: usize,
    m: usize,
) -> (Vec<usize>, Vec<f64>, f64, f64) {
    let mut arc_counts = vec![0usize; profile_len];
    for (i, &j) in profile_index.iter().enumerate() {
        let d = i.abs_diff(j);
        if d < profile_len {
            arc_counts[d] += 1;
        }
    }

    let min_period = m / 2;
    let last = arc_counts.len().saturating_sub(1);
    let mut peaks: Vec<(usize, usize)> = Vec::new();
    for d in min_period..last {
        let count = arc_counts[d];
        let prev = arc_counts[d.saturating_sub(1)];
        let next = arc_counts[(d + 1).min(arc_counts.len() - 1)];
        if count > prev && count > next && count >= 3 {
            peaks.push((d, count));
        }
    }

    // Strongest (highest-count) peaks first.
    peaks.sort_by(|a, b| b.1.cmp(&a.1));

    let detected_periods: Vec<f64> = peaks.iter().take(5).map(|&(d, _)| d as f64).collect();

    let (primary_period, confidence) = match peaks.first() {
        Some(&(d, count)) => {
            let total: usize = arc_counts[min_period..].iter().sum();
            let conf = if total > 0 {
                count as f64 / total as f64
            } else {
                0.0
            };
            (d as f64, conf.min(1.0))
        }
        None => (0.0, 0.0),
    };

    (arc_counts, detected_periods, primary_period, confidence)
}
4063
4064pub fn matrix_profile_fdata(
4078 data: &[f64],
4079 n: usize,
4080 m: usize,
4081 subsequence_length: Option<usize>,
4082 exclusion_zone: Option<f64>,
4083) -> MatrixProfileResult {
4084 let mean_curve = compute_mean_curve(data, n, m);
4086
4087 matrix_profile(&mean_curve, subsequence_length, exclusion_zone)
4089}
4090
4091pub fn matrix_profile_seasonality(
4103 values: &[f64],
4104 subsequence_length: Option<usize>,
4105 confidence_threshold: Option<f64>,
4106) -> (bool, f64, f64) {
4107 let result = matrix_profile(values, subsequence_length, None);
4108
4109 let threshold = confidence_threshold.unwrap_or(0.1);
4110 let is_seasonal = result.confidence >= threshold && result.primary_period > 0.0;
4111
4112 (is_seasonal, result.primary_period, result.confidence)
4113}
4114
/// Result of a Singular Spectrum Analysis decomposition.
#[derive(Debug, Clone)]
pub struct SsaResult {
    /// Reconstructed trend component.
    pub trend: Vec<f64>,
    /// Reconstructed seasonal component.
    pub seasonal: Vec<f64>,
    /// Residual: `values - trend - seasonal`.
    pub noise: Vec<f64>,
    /// Leading singular values (first `n_components`).
    pub singular_values: Vec<f64>,
    /// Variance share `sigma_i² / Σ sigma²` per kept component.
    pub contributions: Vec<f64>,
    /// Embedding window length L actually used.
    pub window_length: usize,
    /// Number of components kept.
    pub n_components: usize,
    /// Period (in samples) of the first periodic component, 0.0 if none.
    pub detected_period: f64,
    /// Relative singular value of that component (`sigma_i / sigma_0`).
    pub confidence: f64,
}
4141
/// Singular Spectrum Analysis: decompose `values` into trend, seasonal,
/// and noise components.
///
/// `window_length` defaults to `(n / 2).clamp(2, 50)`; `n_components`
/// defaults to 10 (capped by the number of singular values). When either
/// `trend_components` or `seasonal_components` is given, those indices are
/// used verbatim (no period detection); otherwise components are grouped
/// automatically via `auto_group_ssa_components`.
///
/// Degenerate input (fewer than 4 points or an unusable window) returns
/// the series unchanged as "trend" with empty decomposition metadata.
pub fn ssa(
    values: &[f64],
    window_length: Option<usize>,
    n_components: Option<usize>,
    trend_components: Option<&[usize]>,
    seasonal_components: Option<&[usize]>,
) -> SsaResult {
    let n = values.len();

    let l = window_length.unwrap_or_else(|| (n / 2).clamp(2, 50));

    if n < 4 || l < 2 || l > n / 2 {
        // Degenerate case: identity "trend", nothing else.
        return SsaResult {
            trend: values.to_vec(),
            seasonal: vec![0.0; n],
            noise: vec![0.0; n],
            singular_values: vec![],
            contributions: vec![],
            window_length: l,
            n_components: 0,
            detected_period: 0.0,
            confidence: 0.0,
        };
    }

    // K columns of the L×K trajectory (Hankel) matrix.
    let k = n - l + 1;

    let trajectory = embed_trajectory(values, l, k);

    let (u, sigma, vt) = svd_decompose(&trajectory, l, k);

    let max_components = sigma.len();
    let n_comp = n_components.unwrap_or(10).min(max_components);

    // Per-component variance share sigma² / Σ sigma².
    let total_var: f64 = sigma.iter().map(|&s| s * s).sum();
    let contributions: Vec<f64> = sigma
        .iter()
        .take(n_comp)
        .map(|&s| s * s / total_var.max(1e-15))
        .collect();

    // Manual grouping short-circuits auto-detection (and reports no period).
    let (trend_idx, seasonal_idx, detected_period, confidence) =
        if trend_components.is_some() || seasonal_components.is_some() {
            let t_idx: Vec<usize> = trend_components.map(|v| v.to_vec()).unwrap_or_default();
            let s_idx: Vec<usize> = seasonal_components.map(|v| v.to_vec()).unwrap_or_default();
            (t_idx, s_idx, 0.0, 0.0)
        } else {
            auto_group_ssa_components(&u, &sigma, l, k, n_comp)
        };

    let trend = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &trend_idx);
    let seasonal = reconstruct_grouped(&u, &sigma, &vt, l, k, n, &seasonal_idx);

    // Noise is whatever the kept components do not explain.
    let noise: Vec<f64> = values
        .iter()
        .zip(trend.iter())
        .zip(seasonal.iter())
        .map(|((&y, &t), &s)| y - t - s)
        .collect();

    SsaResult {
        trend,
        seasonal,
        noise,
        singular_values: sigma.into_iter().take(n_comp).collect(),
        contributions,
        window_length: l,
        n_components: n_comp,
        detected_period,
        confidence,
    }
}
4265
/// Build the column-major L×K trajectory (Hankel) matrix:
/// column `j` holds `values[j..j + l]`.
fn embed_trajectory(values: &[f64], l: usize, k: usize) -> Vec<f64> {
    let mut trajectory = Vec::with_capacity(l * k);
    for j in 0..k {
        trajectory.extend_from_slice(&values[j..j + l]);
    }
    trajectory
}
4279
/// Thin SVD of the L×K trajectory matrix (column-major input).
/// Returns (U, singular values, Vᵀ), each flattened column-major.
fn svd_decompose(trajectory: &[f64], l: usize, k: usize) -> (Vec<f64>, Vec<f64>, Vec<f64>) {
    use nalgebra::{DMatrix, SVD};

    let mat = DMatrix::from_column_slice(l, k, trajectory);

    // Both factors are requested (true, true), so `u` and `v_t` are Some
    // and the unwraps below cannot fail.
    let svd = SVD::new(mat, true, true);

    let u_mat = svd.u.unwrap();
    let vt_mat = svd.v_t.unwrap();
    let sigma = svd.singular_values;

    // Flatten to plain Vecs (nalgebra iterates column-major).
    let u: Vec<f64> = u_mat.iter().cloned().collect();
    let sigma_vec: Vec<f64> = sigma.iter().cloned().collect();
    let vt: Vec<f64> = vt_mat.iter().cloned().collect();

    (u, sigma_vec, vt)
}
4302
/// Classification of a single SSA eigenvector, used for auto-grouping.
enum SsaComponentKind {
    /// Slowly varying component (few sign changes).
    Trend,
    /// Oscillatory component; payload is the detected period in samples.
    Seasonal(f64),
    /// Neither trend-like nor periodic.
    Noise,
}
4308
4309fn classify_ssa_component(u_col: &[f64], trend_count: usize) -> SsaComponentKind {
4311 if is_trend_component(u_col) && trend_count < 2 {
4312 SsaComponentKind::Trend
4313 } else {
4314 let (is_periodic, period) = is_periodic_component(u_col);
4315 if is_periodic {
4316 SsaComponentKind::Seasonal(period)
4317 } else {
4318 SsaComponentKind::Noise
4319 }
4320 }
4321}
4322
/// Fallback grouping when auto-classification found nothing:
/// component 0 (largest singular value) becomes the trend, and
/// components 1 and 2 become the seasonal pair when at least three
/// components exist.
///
/// The previous inner guard `n_comp > 2` on the second seasonal push was
/// dead code — it is always true under the enclosing `n_comp >= 3` — and
/// has been removed without changing behavior.
fn apply_ssa_grouping_defaults(
    trend_idx: &mut Vec<usize>,
    seasonal_idx: &mut Vec<usize>,
    n_comp: usize,
) {
    if trend_idx.is_empty() && n_comp > 0 {
        trend_idx.push(0);
    }
    if seasonal_idx.is_empty() && n_comp >= 3 {
        seasonal_idx.push(1);
        seasonal_idx.push(2);
    }
}
4339
4340fn auto_group_ssa_components(
4342 u: &[f64],
4343 sigma: &[f64],
4344 l: usize,
4345 _k: usize,
4346 n_comp: usize,
4347) -> (Vec<usize>, Vec<usize>, f64, f64) {
4348 let mut trend_idx = Vec::new();
4349 let mut seasonal_idx = Vec::new();
4350 let mut detected_period = 0.0;
4351 let mut confidence = 0.0;
4352
4353 for i in 0..n_comp.min(sigma.len()) {
4354 let u_col: Vec<f64> = (0..l).map(|j| u[j + i * l]).collect();
4355 match classify_ssa_component(&u_col, trend_idx.len()) {
4356 SsaComponentKind::Trend => trend_idx.push(i),
4357 SsaComponentKind::Seasonal(period) => {
4358 seasonal_idx.push(i);
4359 if detected_period == 0.0 && period > 0.0 {
4360 detected_period = period;
4361 confidence = sigma[i] / sigma[0].max(1e-15);
4362 }
4363 }
4364 SsaComponentKind::Noise => {}
4365 }
4366 }
4367
4368 apply_ssa_grouping_defaults(&mut trend_idx, &mut seasonal_idx, n_comp);
4369 (trend_idx, seasonal_idx, detected_period, confidence)
4370}
4371
/// A component is trend-like when it changes sign rarely
/// (at most once per 10 samples). Vectors shorter than 3 never qualify.
fn is_trend_component(u_col: &[f64]) -> bool {
    if u_col.len() < 3 {
        return false;
    }
    let crossings = u_col.windows(2).filter(|w| w[0] * w[1] < 0.0).count();
    crossings <= u_col.len() / 10
}
4390
/// Detect periodicity in an eigenvector via its (energy-normalized)
/// autocorrelation: returns `(true, lag)` for the best lag in
/// `[2, n/2)` whose correlation exceeds 0.3, else `(false, 0.0)`.
fn is_periodic_component(u_col: &[f64]) -> (bool, f64) {
    let n = u_col.len();
    if n < 4 {
        return (false, 0.0);
    }

    let mean = u_col.iter().sum::<f64>() / n as f64;
    let centered: Vec<f64> = u_col.iter().map(|&x| x - mean).collect();

    // Total energy; near-constant vectors cannot be periodic.
    let energy: f64 = centered.iter().map(|&x| x * x).sum();
    if energy < 1e-15 {
        return (false, 0.0);
    }

    let mut best_acf = 0.0;
    let mut best_period = 0.0;
    for lag in 2..n / 2 {
        let corr = centered[..n - lag]
            .iter()
            .zip(&centered[lag..])
            .map(|(&a, &b)| a * b)
            .sum::<f64>()
            / energy;
        if corr > best_acf && corr > 0.3 {
            best_acf = corr;
            best_period = lag as f64;
        }
    }

    (best_acf > 0.3 && best_period > 0.0, best_period)
}
4427
/// Reconstruct the series contribution of the components in `group_idx`:
/// sum the rank-1 terms `sigma_i · u_i · v_iᵀ` into an L×K matrix, then
/// diagonal-average (Hankelize) back to a length-`n` series.
fn reconstruct_grouped(
    u: &[f64],
    sigma: &[f64],
    vt: &[f64],
    l: usize,
    k: usize,
    n: usize,
    group_idx: &[usize],
) -> Vec<f64> {
    if group_idx.is_empty() {
        return vec![0.0; n];
    }

    let mut grouped_matrix = vec![0.0; l * k];

    for &idx in group_idx {
        // Ignore indices beyond the available singular values.
        if idx >= sigma.len() {
            continue;
        }

        let s = sigma[idx];

        for j in 0..k {
            for i in 0..l {
                let u_val = u[i + idx * l];
                // NOTE(review): `vt` is indexed column-major with leading
                // dimension `sigma.len().min(l)` — this assumes the SVD's
                // Vᵀ has min(L, K) rows and that L ≤ K, so the two bounds
                // coincide. Confirm against `svd_decompose`'s output shape.
                let v_val = vt[idx + j * sigma.len().min(l)];
                grouped_matrix[i + j * l] += s * u_val * v_val;
            }
        }
    }

    diagonal_average(&grouped_matrix, l, k, n)
}
4465
/// Diagonal (anti-diagonal) averaging of a column-major L×K matrix back
/// to a length-`n` series: entry (i, j) contributes to output index i + j.
fn diagonal_average(matrix: &[f64], l: usize, k: usize, n: usize) -> Vec<f64> {
    let mut sums = vec![0.0; n];
    let mut counts = vec![0.0; n];

    for j in 0..k {
        for i in 0..l {
            let t = i + j;
            if t < n {
                sums[t] += matrix[i + j * l];
                counts[t] += 1.0;
            }
        }
    }

    for t in 0..n {
        if counts[t] > 0.0 {
            sums[t] /= counts[t];
        }
    }

    sums
}
4491
4492pub fn ssa_fdata(
4504 data: &[f64],
4505 n: usize,
4506 m: usize,
4507 window_length: Option<usize>,
4508 n_components: Option<usize>,
4509) -> SsaResult {
4510 let mean_curve = compute_mean_curve(data, n, m);
4512
4513 ssa(&mean_curve, window_length, n_components, None, None)
4515}
4516
4517pub fn ssa_seasonality(
4527 values: &[f64],
4528 window_length: Option<usize>,
4529 confidence_threshold: Option<f64>,
4530) -> (bool, f64, f64) {
4531 let result = ssa(values, window_length, None, None, None);
4532
4533 let threshold = confidence_threshold.unwrap_or(0.1);
4534 let is_seasonal = result.confidence >= threshold && result.detected_period > 0.0;
4535
4536 (is_seasonal, result.detected_period, result.confidence)
4537}
4538
4539#[cfg(test)]
4540mod tests {
4541 use super::*;
4542 use std::f64::consts::PI;
4543
4544 fn generate_sine(n: usize, m: usize, period: f64, argvals: &[f64]) -> Vec<f64> {
4545 let mut data = vec![0.0; n * m];
4546 for i in 0..n {
4547 for j in 0..m {
4548 data[i + j * n] = (2.0 * PI * argvals[j] / period).sin();
4549 }
4550 }
4551 data
4552 }
4553
    /// FFT period estimation recovers a 2.0-period sine on a 0.1-spaced grid.
    #[test]
    fn test_period_estimation_fft() {
        let m = 200;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
        let period = 2.0;
        let data = generate_sine(1, m, period, &argvals);

        let estimate = estimate_period_fft(&data, 1, m, &argvals);
        // Period within one FFT bin; confidence above the noise baseline.
        assert!((estimate.period - period).abs() < 0.2);
        assert!(estimate.confidence > 1.0);
    }
4565
    /// Peak detection with a 1.5 minimum distance finds peaks on a sine
    /// and recovers the generating period from inter-peak spacing.
    #[test]
    fn test_peak_detection() {
        let m = 100;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
        let period = 2.0;
        let data = generate_sine(1, m, period, &argvals);

        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);

        assert!(!result.peaks[0].is_empty());
        assert!((result.mean_period - period).abs() < 0.3);
    }
4579
    /// sin(2πt/2) over [0, 10] has exactly 5 maxima at t = 0.5 + 2k;
    /// detection must find all of them and a mean period near 2.
    #[test]
    fn test_peak_detection_known_sine() {
        let m = 200;
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        assert_eq!(
            result.peaks[0].len(),
            5,
            "Expected 5 peaks, got {}. Peak times: {:?}",
            result.peaks[0].len(),
            result.peaks[0].iter().map(|p| p.time).collect::<Vec<_>>()
        );

        // Analytic maxima of sin(2πt/2) on [0, 10].
        let expected_times = [0.5, 2.5, 4.5, 6.5, 8.5];
        for (peak, expected) in result.peaks[0].iter().zip(expected_times.iter()) {
            assert!(
                (peak.time - expected).abs() < 0.15,
                "Peak at {:.3} not close to expected {:.3}",
                peak.time,
                expected
            );
        }

        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4622
    /// A minimum peak distance below the true period (1.5 < 2) keeps all
    /// 5 peaks; a distance above it (2.5 > 2) must suppress some.
    #[test]
    fn test_peak_detection_with_min_distance() {
        let m = 200;
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        let result = detect_peaks(&data, 1, m, &argvals, Some(1.5), None, false, None);
        assert_eq!(
            result.peaks[0].len(),
            5,
            "With min_distance=1.5, expected 5 peaks, got {}",
            result.peaks[0].len()
        );

        let result2 = detect_peaks(&data, 1, m, &argvals, Some(2.5), None, false, None);
        assert!(
            result2.peaks[0].len() < 5,
            "With min_distance=2.5, expected fewer than 5 peaks, got {}",
            result2.peaks[0].len()
        );
    }
4651
    /// A period-1 sine over [0, 10] yields exactly 10 peaks and a mean
    /// inter-peak period near 1.
    #[test]
    fn test_peak_detection_period_1() {
        let m = 400;
        let period = 1.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin())
            .collect();

        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        assert_eq!(
            result.peaks[0].len(),
            10,
            "Expected 10 peaks, got {}",
            result.peaks[0].len()
        );

        assert!(
            (result.mean_period - period).abs() < 0.1,
            "Mean period {:.3} not close to expected {:.3}",
            result.mean_period,
            period
        );
    }
4682
    /// A vertical offset must not change the peak count, and each peak
    /// value should sit at offset + amplitude = 2.0.
    #[test]
    fn test_peak_detection_shifted_sine() {
        let m = 200;
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| (2.0 * std::f64::consts::PI * t / period).sin() + 1.0)
            .collect();

        let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        assert_eq!(
            result.peaks[0].len(),
            5,
            "Expected 5 peaks for shifted sine, got {}",
            result.peaks[0].len()
        );

        for peak in &result.peaks[0] {
            assert!(
                (peak.value - 2.0).abs() < 0.05,
                "Peak value {:.3} not close to expected 2.0",
                peak.value
            );
        }
    }
4714
    /// A prominence threshold should filter out the small ripple peaks
    /// superimposed on the main oscillation.
    #[test]
    fn test_peak_detection_prominence() {
        let m = 200;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
        let data: Vec<f64> = argvals
            .iter()
            .map(|&t| {
                // Main sine plus a fast, low-amplitude ripple.
                let base = (2.0 * std::f64::consts::PI * t / 2.0).sin();
                let ripple = 0.1 * (2.0 * std::f64::consts::PI * t * 4.0).sin();
                base + ripple
            })
            .collect();

        let result_no_filter = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

        let result_filtered = detect_peaks(&data, 1, m, &argvals, None, Some(0.5), false, None);

        assert!(
            result_filtered.peaks[0].len() <= result_no_filter.peaks[0].len(),
            "Prominence filter should reduce peak count"
        );
    }
4743
    /// Peak count must be amplitude-invariant, and each peak value should
    /// match the generating amplitude.
    #[test]
    fn test_peak_detection_different_amplitudes() {
        let m = 200;
        let period = 2.0;
        let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();

        for amplitude in [0.5, 1.0, 2.0, 5.0] {
            let data: Vec<f64> = argvals
                .iter()
                .map(|&t| amplitude * (2.0 * std::f64::consts::PI * t / period).sin())
                .collect();

            let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);

            assert_eq!(
                result.peaks[0].len(),
                5,
                "Amplitude {} should still find 5 peaks",
                amplitude
            );

            for peak in &result.peaks[0] {
                assert!(
                    (peak.value - amplitude).abs() < 0.1,
                    "Peak value {:.3} should be close to amplitude {}",
                    peak.value,
                    amplitude
                );
            }
        }
    }
4777
4778 #[test]
4779 fn test_peak_detection_varying_frequency() {
4780 let m = 400;
4783 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 10.0 / (m - 1) as f64).collect();
4784
4785 let data: Vec<f64> = argvals
4788 .iter()
4789 .map(|&t| {
4790 let phase = 2.0 * std::f64::consts::PI * (0.5 * t + 0.05 * t * t);
4791 phase.sin()
4792 })
4793 .collect();
4794
4795 let result = detect_peaks(&data, 1, m, &argvals, None, None, false, None);
4796
4797 assert!(
4799 result.peaks[0].len() >= 5,
4800 "Should find at least 5 peaks, got {}",
4801 result.peaks[0].len()
4802 );
4803
4804 let distances = &result.inter_peak_distances[0];
4806 if distances.len() >= 3 {
4807 let early_avg = (distances[0] + distances[1]) / 2.0;
4809 let late_avg = (distances[distances.len() - 2] + distances[distances.len() - 1]) / 2.0;
4810 assert!(
4811 late_avg < early_avg,
4812 "Later peaks should be closer: early avg={:.3}, late avg={:.3}",
4813 early_avg,
4814 late_avg
4815 );
4816 }
4817 }
4818
4819 #[test]
4820 fn test_peak_detection_sum_of_sines() {
4821 let m = 300;
4824 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 12.0 / (m - 1) as f64).collect();
4825
4826 let data: Vec<f64> = argvals
4827 .iter()
4828 .map(|&t| {
4829 (2.0 * std::f64::consts::PI * t / 2.0).sin()
4830 + 0.5 * (2.0 * std::f64::consts::PI * t / 3.0).sin()
4831 })
4832 .collect();
4833
4834 let result = detect_peaks(&data, 1, m, &argvals, Some(1.0), None, false, None);
4835
4836 assert!(
4838 result.peaks[0].len() >= 4,
4839 "Should find at least 4 peaks, got {}",
4840 result.peaks[0].len()
4841 );
4842
4843 let distances = &result.inter_peak_distances[0];
4845 if distances.len() >= 2 {
4846 let min_dist = distances.iter().cloned().fold(f64::INFINITY, f64::min);
4847 let max_dist = distances.iter().cloned().fold(0.0, f64::max);
4848 assert!(
4849 max_dist > min_dist * 1.1,
4850 "Distances should vary: min={:.3}, max={:.3}",
4851 min_dist,
4852 max_dist
4853 );
4854 }
4855 }
4856
4857 #[test]
4858 fn test_seasonal_strength() {
4859 let m = 200;
4860 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4861 let period = 2.0;
4862 let data = generate_sine(1, m, period, &argvals);
4863
4864 let strength = seasonal_strength_variance(&data, 1, m, &argvals, period, 3);
4865 assert!(strength > 0.8);
4867
4868 let strength_spectral = seasonal_strength_spectral(&data, 1, m, &argvals, period);
4869 assert!(strength_spectral > 0.5);
4870 }
4871
4872 #[test]
4873 fn test_instantaneous_period() {
4874 let m = 200;
4875 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4876 let period = 2.0;
4877 let data = generate_sine(1, m, period, &argvals);
4878
4879 let result = instantaneous_period(&data, 1, m, &argvals);
4880
4881 let mid_period = result.period[m / 2];
4883 assert!(
4884 (mid_period - period).abs() < 0.5,
4885 "Expected period ~{}, got {}",
4886 period,
4887 mid_period
4888 );
4889 }
4890
4891 #[test]
4892 fn test_peak_timing_analysis() {
4893 let m = 500;
4895 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.02).collect();
4896 let period = 2.0;
4897 let data = generate_sine(1, m, period, &argvals);
4898
4899 let result = analyze_peak_timing(&data, 1, m, &argvals, period, Some(11));
4900
4901 assert!(!result.peak_times.is_empty());
4903 assert!(result.mean_timing.is_finite());
4905 assert!(result.std_timing < 0.1 || result.std_timing.is_nan());
4907 }
4908
4909 #[test]
4910 fn test_seasonality_classification() {
4911 let m = 400;
4912 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
4913 let period = 2.0;
4914 let data = generate_sine(1, m, period, &argvals);
4915
4916 let result = classify_seasonality(&data, 1, m, &argvals, period, None, None);
4917
4918 assert!(result.is_seasonal);
4919 assert!(result.seasonal_strength > 0.5);
4920 assert!(matches!(
4921 result.classification,
4922 SeasonalType::StableSeasonal | SeasonalType::VariableTiming
4923 ));
4924 }
4925
4926 #[test]
4927 fn test_otsu_threshold() {
4928 let values = vec![
4930 0.1, 0.12, 0.15, 0.18, 0.11, 0.14, 0.7, 0.75, 0.8, 0.85, 0.9, 0.72,
4931 ];
4932
4933 let threshold = otsu_threshold(&values);
4934
4935 assert!(threshold >= 0.1, "Threshold {} should be >= 0.1", threshold);
4939 assert!(threshold <= 0.9, "Threshold {} should be <= 0.9", threshold);
4940 }
4941
4942 #[test]
4943 fn test_gcv_fourier_nbasis_selection() {
4944 let m = 100;
4945 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
4946
4947 let mut data = vec![0.0; m];
4949 for j in 0..m {
4950 data[j] = (2.0 * PI * argvals[j] / 2.0).sin() + 0.1 * (j as f64 * 0.3).sin();
4951 }
4952
4953 let nbasis = crate::basis::select_fourier_nbasis_gcv(&data, 1, m, &argvals, 5, 25);
4954
4955 assert!(nbasis >= 5);
4957 assert!(nbasis <= 25);
4958 }
4959
4960 #[test]
4961 fn test_detect_multiple_periods() {
4962 let m = 400;
4963 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); let period1 = 2.0;
4967 let period2 = 7.0;
4968 let mut data = vec![0.0; m];
4969 for j in 0..m {
4970 data[j] = (2.0 * PI * argvals[j] / period1).sin()
4971 + 0.6 * (2.0 * PI * argvals[j] / period2).sin();
4972 }
4973
4974 let detected = detect_multiple_periods(&data, 1, m, &argvals, 5, 0.4, 0.20);
4976
4977 assert!(
4979 detected.len() >= 2,
4980 "Expected at least 2 periods, found {}",
4981 detected.len()
4982 );
4983
4984 let periods: Vec<f64> = detected.iter().map(|d| d.period).collect();
4986 let has_period1 = periods.iter().any(|&p| (p - period1).abs() < 0.3);
4987 let has_period2 = periods.iter().any(|&p| (p - period2).abs() < 0.5);
4988
4989 assert!(
4990 has_period1,
4991 "Expected to find period ~{}, got {:?}",
4992 period1, periods
4993 );
4994 assert!(
4995 has_period2,
4996 "Expected to find period ~{}, got {:?}",
4997 period2, periods
4998 );
4999
5000 assert!(
5002 detected[0].amplitude > detected[1].amplitude,
5003 "First detected should have higher amplitude"
5004 );
5005
5006 for d in &detected {
5008 assert!(
5009 d.strength > 0.0,
5010 "Detected period should have positive strength"
5011 );
5012 assert!(
5013 d.confidence > 0.0,
5014 "Detected period should have positive confidence"
5015 );
5016 assert!(
5017 d.amplitude > 0.0,
5018 "Detected period should have positive amplitude"
5019 );
5020 }
5021 }
5022
5023 #[test]
5028 fn test_amplitude_modulation_stable() {
5029 let m = 200;
5031 let period = 0.2;
5032 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5033
5034 let data: Vec<f64> = argvals
5036 .iter()
5037 .map(|&t| (2.0 * PI * t / period).sin())
5038 .collect();
5039
5040 let result = detect_amplitude_modulation(
5041 &data, 1, m, &argvals, period, 0.15, 0.3, );
5044
5045 eprintln!(
5046 "Stable test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5047 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5048 );
5049
5050 assert!(result.is_seasonal, "Should detect seasonality");
5051 assert!(
5052 !result.has_modulation,
5053 "Constant amplitude should not have modulation, got score={:.4}",
5054 result.modulation_score
5055 );
5056 assert_eq!(
5057 result.modulation_type,
5058 ModulationType::Stable,
5059 "Should be classified as Stable"
5060 );
5061 }
5062
5063 #[test]
5064 fn test_amplitude_modulation_emerging() {
5065 let m = 200;
5067 let period = 0.2;
5068 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5069
5070 let data: Vec<f64> = argvals
5072 .iter()
5073 .map(|&t| {
5074 let amplitude = 0.2 + 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
5076 })
5077 .collect();
5078
5079 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5080
5081 eprintln!(
5082 "Emerging test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5083 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5084 );
5085
5086 assert!(result.is_seasonal, "Should detect seasonality");
5087 assert!(
5088 result.has_modulation,
5089 "Growing amplitude should have modulation, score={:.4}",
5090 result.modulation_score
5091 );
5092 assert_eq!(
5093 result.modulation_type,
5094 ModulationType::Emerging,
5095 "Should be classified as Emerging, trend={:.4}",
5096 result.amplitude_trend
5097 );
5098 assert!(
5099 result.amplitude_trend > 0.0,
5100 "Trend should be positive for emerging"
5101 );
5102 }
5103
5104 #[test]
5105 fn test_amplitude_modulation_fading() {
5106 let m = 200;
5108 let period = 0.2;
5109 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5110
5111 let data: Vec<f64> = argvals
5113 .iter()
5114 .map(|&t| {
5115 let amplitude = 1.0 - 0.8 * t; amplitude * (2.0 * PI * t / period).sin()
5117 })
5118 .collect();
5119
5120 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5121
5122 eprintln!(
5123 "Fading test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5124 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5125 );
5126
5127 assert!(result.is_seasonal, "Should detect seasonality");
5128 assert!(
5129 result.has_modulation,
5130 "Fading amplitude should have modulation"
5131 );
5132 assert_eq!(
5133 result.modulation_type,
5134 ModulationType::Fading,
5135 "Should be classified as Fading, trend={:.4}",
5136 result.amplitude_trend
5137 );
5138 assert!(
5139 result.amplitude_trend < 0.0,
5140 "Trend should be negative for fading"
5141 );
5142 }
5143
5144 #[test]
5145 fn test_amplitude_modulation_oscillating() {
5146 let m = 200;
5148 let period = 0.1;
5149 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5150
5151 let data: Vec<f64> = argvals
5153 .iter()
5154 .map(|&t| {
5155 let amplitude = 0.5 + 0.4 * (2.0 * PI * t * 2.0).sin(); amplitude * (2.0 * PI * t / period).sin()
5157 })
5158 .collect();
5159
5160 let result = detect_amplitude_modulation(&data, 1, m, &argvals, period, 0.15, 0.2);
5161
5162 eprintln!(
5163 "Oscillating test: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5164 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5165 );
5166
5167 assert!(result.is_seasonal, "Should detect seasonality");
5168 if result.has_modulation {
5170 assert!(
5172 result.amplitude_trend.abs() < 0.5,
5173 "Trend should be small for oscillating"
5174 );
5175 }
5176 }
5177
5178 #[test]
5179 fn test_amplitude_modulation_non_seasonal() {
5180 let m = 100;
5182 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5183
5184 let data: Vec<f64> = (0..m)
5186 .map(|i| ((i as f64 * 1.618).sin() * 100.0).fract())
5187 .collect();
5188
5189 let result = detect_amplitude_modulation(
5190 &data, 1, m, &argvals, 0.2, 0.15, 0.3,
5192 );
5193
5194 assert!(
5195 !result.is_seasonal,
5196 "Noise should not be detected as seasonal"
5197 );
5198 assert_eq!(
5199 result.modulation_type,
5200 ModulationType::NonSeasonal,
5201 "Should be classified as NonSeasonal"
5202 );
5203 }
5204
5205 #[test]
5210 fn test_wavelet_amplitude_modulation_stable() {
5211 let m = 200;
5212 let period = 0.2;
5213 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5214
5215 let data: Vec<f64> = argvals
5216 .iter()
5217 .map(|&t| (2.0 * PI * t / period).sin())
5218 .collect();
5219
5220 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.3);
5221
5222 eprintln!(
5223 "Wavelet stable: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5224 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5225 );
5226
5227 assert!(result.is_seasonal, "Should detect seasonality");
5228 assert!(
5229 !result.has_modulation,
5230 "Constant amplitude should not have modulation, got score={:.4}",
5231 result.modulation_score
5232 );
5233 }
5234
5235 #[test]
5236 fn test_wavelet_amplitude_modulation_emerging() {
5237 let m = 200;
5238 let period = 0.2;
5239 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5240
5241 let data: Vec<f64> = argvals
5243 .iter()
5244 .map(|&t| {
5245 let amplitude = 0.2 + 0.8 * t;
5246 amplitude * (2.0 * PI * t / period).sin()
5247 })
5248 .collect();
5249
5250 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5251
5252 eprintln!(
5253 "Wavelet emerging: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5254 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5255 );
5256
5257 assert!(result.is_seasonal, "Should detect seasonality");
5258 assert!(
5259 result.has_modulation,
5260 "Growing amplitude should have modulation"
5261 );
5262 assert!(
5263 result.amplitude_trend > 0.0,
5264 "Trend should be positive for emerging"
5265 );
5266 }
5267
5268 #[test]
5269 fn test_wavelet_amplitude_modulation_fading() {
5270 let m = 200;
5271 let period = 0.2;
5272 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5273
5274 let data: Vec<f64> = argvals
5276 .iter()
5277 .map(|&t| {
5278 let amplitude = 1.0 - 0.8 * t;
5279 amplitude * (2.0 * PI * t / period).sin()
5280 })
5281 .collect();
5282
5283 let result = detect_amplitude_modulation_wavelet(&data, 1, m, &argvals, period, 0.15, 0.2);
5284
5285 eprintln!(
5286 "Wavelet fading: is_seasonal={}, has_modulation={}, modulation_score={:.4}, amplitude_trend={:.4}, type={:?}",
5287 result.is_seasonal, result.has_modulation, result.modulation_score, result.amplitude_trend, result.modulation_type
5288 );
5289
5290 assert!(result.is_seasonal, "Should detect seasonality");
5291 assert!(
5292 result.has_modulation,
5293 "Fading amplitude should have modulation"
5294 );
5295 assert!(
5296 result.amplitude_trend < 0.0,
5297 "Trend should be negative for fading"
5298 );
5299 }
5300
5301 #[test]
5302 fn test_seasonal_strength_wavelet() {
5303 let m = 200;
5304 let period = 0.2;
5305 let argvals: Vec<f64> = (0..m).map(|i| i as f64 / (m - 1) as f64).collect();
5306
5307 let seasonal_data: Vec<f64> = argvals
5309 .iter()
5310 .map(|&t| (2.0 * PI * t / period).sin())
5311 .collect();
5312
5313 let strength = seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period);
5314 eprintln!("Wavelet strength (pure sine): {:.4}", strength);
5315 assert!(
5316 strength > 0.5,
5317 "Pure sine should have high wavelet strength"
5318 );
5319
5320 let noise_data: Vec<f64> = (0..m)
5322 .map(|i| ((i * 12345 + 67890) % 1000) as f64 / 1000.0 - 0.5)
5323 .collect();
5324
5325 let noise_strength = seasonal_strength_wavelet(&noise_data, 1, m, &argvals, period);
5326 eprintln!("Wavelet strength (noise): {:.4}", noise_strength);
5327 assert!(
5328 noise_strength < 0.3,
5329 "Noise should have low wavelet strength"
5330 );
5331
5332 let wrong_period_strength =
5334 seasonal_strength_wavelet(&seasonal_data, 1, m, &argvals, period * 2.0);
5335 eprintln!(
5336 "Wavelet strength (wrong period): {:.4}",
5337 wrong_period_strength
5338 );
5339 assert!(
5340 wrong_period_strength < strength,
5341 "Wrong period should have lower strength"
5342 );
5343 }
5344
5345 #[test]
5346 fn test_compute_mean_curve() {
5347 let data = vec![1.0, 2.0, 2.0, 4.0, 3.0, 6.0]; let mean = compute_mean_curve(&data, 2, 3);
5353 assert_eq!(mean.len(), 3);
5354 assert!((mean[0] - 1.5).abs() < 1e-10);
5355 assert!((mean[1] - 3.0).abs() < 1e-10);
5356 assert!((mean[2] - 4.5).abs() < 1e-10);
5357 }
5358
5359 #[test]
5360 fn test_compute_mean_curve_parallel_consistency() {
5361 let n = 10;
5363 let m = 200;
5364 let data: Vec<f64> = (0..n * m).map(|i| (i as f64 * 0.1).sin()).collect();
5365
5366 let seq_result = compute_mean_curve_impl(&data, n, m, false);
5367 let par_result = compute_mean_curve_impl(&data, n, m, true);
5368
5369 assert_eq!(seq_result.len(), par_result.len());
5370 for (s, p) in seq_result.iter().zip(par_result.iter()) {
5371 assert!(
5372 (s - p).abs() < 1e-10,
5373 "Sequential and parallel results differ"
5374 );
5375 }
5376 }
5377
5378 #[test]
5379 fn test_interior_bounds() {
5380 let bounds = interior_bounds(100);
5382 assert!(bounds.is_some());
5383 let (start, end) = bounds.unwrap();
5384 assert_eq!(start, 10);
5385 assert_eq!(end, 90);
5386
5387 let bounds = interior_bounds(10);
5389 assert!(bounds.is_some());
5390 let (start, end) = bounds.unwrap();
5391 assert!(start < end);
5392
5393 let bounds = interior_bounds(2);
5395 assert!(bounds.is_some() || bounds.is_none());
5397 }
5398
5399 #[test]
5400 fn test_hilbert_transform_pure_sine() {
5401 let m = 200;
5403 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5404 let signal: Vec<f64> = argvals.iter().map(|&t| (2.0 * PI * t).sin()).collect();
5405
5406 let analytic = hilbert_transform(&signal);
5407 assert_eq!(analytic.len(), m);
5408
5409 for c in analytic.iter().skip(10).take(m - 20) {
5411 let amp = c.norm();
5412 assert!(
5413 (amp - 1.0).abs() < 0.1,
5414 "Amplitude should be ~1, got {}",
5415 amp
5416 );
5417 }
5418 }
5419
5420 #[test]
5421 fn test_sazed_pure_sine() {
5422 let m = 200;
5424 let period = 2.0;
5425 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5426 let data: Vec<f64> = argvals
5427 .iter()
5428 .map(|&t| (2.0 * PI * t / period).sin())
5429 .collect();
5430
5431 let result = sazed(&data, &argvals, None);
5432
5433 assert!(result.period.is_finite(), "SAZED should detect a period");
5434 assert!(
5435 (result.period - period).abs() < 0.3,
5436 "Expected period ~{}, got {}",
5437 period,
5438 result.period
5439 );
5440 assert!(
5441 result.confidence > 0.4,
5442 "Expected confidence > 0.4, got {}",
5443 result.confidence
5444 );
5445 assert!(
5446 result.agreeing_components >= 2,
5447 "Expected at least 2 agreeing components, got {}",
5448 result.agreeing_components
5449 );
5450 }
5451
5452 #[test]
5453 fn test_sazed_noisy_sine() {
5454 let m = 300;
5456 let period = 3.0;
5457 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5458
5459 let data: Vec<f64> = argvals
5461 .iter()
5462 .enumerate()
5463 .map(|(i, &t)| {
5464 let signal = (2.0 * PI * t / period).sin();
5465 let noise = 0.1 * (17.3 * i as f64).sin();
5466 signal + noise
5467 })
5468 .collect();
5469
5470 let result = sazed(&data, &argvals, Some(0.15));
5471
5472 assert!(
5473 result.period.is_finite(),
5474 "SAZED should detect a period even with noise"
5475 );
5476 assert!(
5477 (result.period - period).abs() < 0.5,
5478 "Expected period ~{}, got {}",
5479 period,
5480 result.period
5481 );
5482 }
5483
5484 #[test]
5485 fn test_sazed_fdata() {
5486 let n = 5;
5488 let m = 200;
5489 let period = 2.0;
5490 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5491 let data = generate_sine(n, m, period, &argvals);
5492
5493 let result = sazed_fdata(&data, n, m, &argvals, None);
5494
5495 assert!(result.period.is_finite(), "SAZED should detect a period");
5496 assert!(
5497 (result.period - period).abs() < 0.3,
5498 "Expected period ~{}, got {}",
5499 period,
5500 result.period
5501 );
5502 }
5503
5504 #[test]
5505 fn test_sazed_short_series() {
5506 let argvals: Vec<f64> = (0..5).map(|i| i as f64).collect();
5508 let data: Vec<f64> = argvals.iter().map(|&t| t.sin()).collect();
5509
5510 let result = sazed(&data, &argvals, None);
5511
5512 assert!(
5514 result.period.is_nan() || result.period.is_finite(),
5515 "Should return NaN or valid period"
5516 );
5517 }
5518
5519 #[test]
5520 fn test_autoperiod_pure_sine() {
5521 let m = 200;
5523 let period = 2.0;
5524 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5525 let data: Vec<f64> = argvals
5526 .iter()
5527 .map(|&t| (2.0 * PI * t / period).sin())
5528 .collect();
5529
5530 let result = autoperiod(&data, &argvals, None, None);
5531
5532 assert!(
5533 result.period.is_finite(),
5534 "Autoperiod should detect a period"
5535 );
5536 assert!(
5537 (result.period - period).abs() < 0.3,
5538 "Expected period ~{}, got {}",
5539 period,
5540 result.period
5541 );
5542 assert!(
5543 result.confidence > 0.0,
5544 "Expected positive confidence, got {}",
5545 result.confidence
5546 );
5547 }
5548
5549 #[test]
5550 fn test_autoperiod_with_trend() {
5551 let m = 300;
5553 let period = 3.0;
5554 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5555 let data: Vec<f64> = argvals
5556 .iter()
5557 .map(|&t| 0.2 * t + (2.0 * PI * t / period).sin())
5558 .collect();
5559
5560 let result = autoperiod(&data, &argvals, None, None);
5561
5562 assert!(
5563 result.period.is_finite(),
5564 "Autoperiod should detect a period"
5565 );
5566 assert!(
5568 (result.period - period).abs() < 0.5,
5569 "Expected period ~{}, got {}",
5570 period,
5571 result.period
5572 );
5573 }
5574
5575 #[test]
5576 fn test_autoperiod_candidates() {
5577 let m = 200;
5579 let period = 2.0;
5580 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5581 let data: Vec<f64> = argvals
5582 .iter()
5583 .map(|&t| (2.0 * PI * t / period).sin())
5584 .collect();
5585
5586 let result = autoperiod(&data, &argvals, Some(5), Some(10));
5587
5588 assert!(
5589 !result.candidates.is_empty(),
5590 "Should have at least one candidate"
5591 );
5592
5593 let max_score = result
5595 .candidates
5596 .iter()
5597 .map(|c| c.combined_score)
5598 .fold(f64::NEG_INFINITY, f64::max);
5599 assert!(
5600 (result.confidence - max_score).abs() < 1e-10,
5601 "Returned confidence should match best candidate's score"
5602 );
5603 }
5604
5605 #[test]
5606 fn test_autoperiod_fdata() {
5607 let n = 5;
5609 let m = 200;
5610 let period = 2.0;
5611 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5612 let data = generate_sine(n, m, period, &argvals);
5613
5614 let result = autoperiod_fdata(&data, n, m, &argvals, None, None);
5615
5616 assert!(
5617 result.period.is_finite(),
5618 "Autoperiod should detect a period"
5619 );
5620 assert!(
5621 (result.period - period).abs() < 0.3,
5622 "Expected period ~{}, got {}",
5623 period,
5624 result.period
5625 );
5626 }
5627
5628 #[test]
5629 fn test_cfd_autoperiod_pure_sine() {
5630 let m = 200;
5632 let period = 2.0;
5633 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5634 let data: Vec<f64> = argvals
5635 .iter()
5636 .map(|&t| (2.0 * PI * t / period).sin())
5637 .collect();
5638
5639 let result = cfd_autoperiod(&data, &argvals, None, None);
5640
5641 assert!(
5642 result.period.is_finite(),
5643 "CFDAutoperiod should detect a period"
5644 );
5645 assert!(
5646 (result.period - period).abs() < 0.3,
5647 "Expected period ~{}, got {}",
5648 period,
5649 result.period
5650 );
5651 }
5652
5653 #[test]
5654 fn test_cfd_autoperiod_with_trend() {
5655 let m = 300;
5657 let period = 3.0;
5658 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5659 let data: Vec<f64> = argvals
5660 .iter()
5661 .map(|&t| 2.0 * t + (2.0 * PI * t / period).sin())
5662 .collect();
5663
5664 let result = cfd_autoperiod(&data, &argvals, None, None);
5665
5666 assert!(
5667 result.period.is_finite(),
5668 "CFDAutoperiod should detect a period despite trend"
5669 );
5670 assert!(
5672 (result.period - period).abs() < 0.6,
5673 "Expected period ~{}, got {}",
5674 period,
5675 result.period
5676 );
5677 }
5678
5679 #[test]
5680 fn test_cfd_autoperiod_multiple_periods() {
5681 let m = 400;
5683 let period1 = 2.0;
5684 let period2 = 5.0;
5685 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5686 let data: Vec<f64> = argvals
5687 .iter()
5688 .map(|&t| (2.0 * PI * t / period1).sin() + 0.5 * (2.0 * PI * t / period2).sin())
5689 .collect();
5690
5691 let result = cfd_autoperiod(&data, &argvals, None, None);
5692
5693 assert!(
5694 !result.periods.is_empty(),
5695 "Should detect at least one period"
5696 );
5697 let close_to_p1 = (result.period - period1).abs() < 0.5;
5699 let close_to_p2 = (result.period - period2).abs() < 1.0;
5700 assert!(
5701 close_to_p1 || close_to_p2,
5702 "Primary period {} not close to {} or {}",
5703 result.period,
5704 period1,
5705 period2
5706 );
5707 }
5708
5709 #[test]
5710 fn test_cfd_autoperiod_fdata() {
5711 let n = 5;
5713 let m = 200;
5714 let period = 2.0;
5715 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
5716 let data = generate_sine(n, m, period, &argvals);
5717
5718 let result = cfd_autoperiod_fdata(&data, n, m, &argvals, None, None);
5719
5720 assert!(
5721 result.period.is_finite(),
5722 "CFDAutoperiod should detect a period"
5723 );
5724 assert!(
5725 (result.period - period).abs() < 0.3,
5726 "Expected period ~{}, got {}",
5727 period,
5728 result.period
5729 );
5730 }
5731
5732 #[test]
5737 fn test_ssa_pure_sine() {
5738 let n = 200;
5739 let period = 12.0;
5740 let values: Vec<f64> = (0..n)
5741 .map(|i| {
5742 let t = i as f64;
5743 0.01 * t + (2.0 * PI * t / period).sin() + 0.05 * ((i * 7) as f64 * 0.1).sin()
5744 })
5745 .collect();
5746
5747 let result = ssa(&values, None, None, None, None);
5748
5749 for i in 0..n {
5751 let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5752 assert!(
5753 (reconstructed - values[i]).abs() < 1e-8,
5754 "SSA reconstruction error at {}: {} vs {}",
5755 i,
5756 reconstructed,
5757 values[i]
5758 );
5759 }
5760
5761 for i in 1..result.singular_values.len() {
5763 assert!(
5764 result.singular_values[i] <= result.singular_values[i - 1] + 1e-10,
5765 "Singular values should be descending: {} > {}",
5766 result.singular_values[i],
5767 result.singular_values[i - 1]
5768 );
5769 }
5770
5771 let total_contrib: f64 = result.contributions.iter().sum();
5773 assert!(
5774 total_contrib <= 1.0 + 1e-10,
5775 "Contributions should sum to <= 1, got {}",
5776 total_contrib
5777 );
5778 }
5779
5780 #[test]
5781 fn test_ssa_explicit_groupings() {
5782 let n = 100;
5783 let period = 10.0;
5784 let values: Vec<f64> = (0..n)
5785 .map(|i| 0.01 * i as f64 + (2.0 * PI * i as f64 / period).sin())
5786 .collect();
5787
5788 let trend_components = [0usize];
5789 let seasonal_components = [1usize, 2];
5790
5791 let result = ssa(
5792 &values,
5793 None,
5794 None,
5795 Some(&trend_components),
5796 Some(&seasonal_components),
5797 );
5798
5799 assert_eq!(result.trend.len(), n);
5800 assert_eq!(result.seasonal.len(), n);
5801 assert_eq!(result.noise.len(), n);
5802
5803 for i in 0..n {
5805 let reconstructed = result.trend[i] + result.seasonal[i] + result.noise[i];
5806 assert!(
5807 (reconstructed - values[i]).abs() < 1e-8,
5808 "SSA explicit grouping reconstruction error at {}",
5809 i
5810 );
5811 }
5812 }
5813
5814 #[test]
5815 fn test_ssa_short_series() {
5816 let values = vec![1.0, 2.0, 3.0];
5818 let result = ssa(&values, None, None, None, None);
5819
5820 assert_eq!(result.trend, values);
5821 assert_eq!(result.seasonal, vec![0.0; 3]);
5822 assert_eq!(result.noise, vec![0.0; 3]);
5823 assert_eq!(result.n_components, 0);
5824 }
5825
5826 #[test]
5827 fn test_ssa_fdata() {
5828 let n = 5;
5829 let m = 100;
5830 let mut data = vec![0.0; n * m];
5831 for i in 0..n {
5832 let amp = (i + 1) as f64;
5833 for j in 0..m {
5834 data[i + j * n] = amp * (2.0 * PI * j as f64 / 12.0).sin() + 0.01 * j as f64;
5835 }
5836 }
5837
5838 let result = ssa_fdata(&data, n, m, None, None);
5839
5840 assert_eq!(result.trend.len(), m);
5841 assert_eq!(result.seasonal.len(), m);
5842 assert_eq!(result.noise.len(), m);
5843 assert!(!result.singular_values.is_empty());
5844 }
5845
5846 #[test]
5847 fn test_ssa_seasonality() {
5848 let n = 200;
5850 let period = 12.0;
5851 let seasonal_values: Vec<f64> = (0..n)
5852 .map(|i| (2.0 * PI * i as f64 / period).sin())
5853 .collect();
5854
5855 let (is_seasonal, _det_period, confidence) =
5856 ssa_seasonality(&seasonal_values, None, Some(0.05));
5857
5858 assert!(confidence >= 0.0, "Confidence should be non-negative");
5861
5862 let noise_values: Vec<f64> = (0..n)
5864 .map(|i| ((i * 13 + 7) as f64 * 0.1).sin() * 0.01)
5865 .collect();
5866
5867 let (is_noise_seasonal, _, noise_conf) = ssa_seasonality(&noise_values, None, Some(0.5));
5868
5869 let _ = (is_seasonal, is_noise_seasonal, noise_conf);
5872 }
5873
5874 #[test]
5879 fn test_matrix_profile_periodic() {
5880 let period = 20;
5881 let n = period * 10;
5882 let values: Vec<f64> = (0..n)
5883 .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5884 .collect();
5885
5886 let result = matrix_profile(&values, Some(15), None);
5887
5888 assert_eq!(result.profile.len(), n - 15 + 1);
5889 assert_eq!(result.profile_index.len(), n - 15 + 1);
5890 assert_eq!(result.subsequence_length, 15);
5891
5892 for &p in &result.profile {
5894 assert!(p.is_finite(), "Profile values should be finite");
5895 }
5896
5897 assert!(
5899 (result.primary_period - period as f64).abs() < 5.0,
5900 "Expected primary_period ≈ {}, got {}",
5901 period,
5902 result.primary_period
5903 );
5904 }
5905
5906 #[test]
5907 fn test_matrix_profile_non_periodic() {
5908 let n = 200;
5910 let values: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5911
5912 let result = matrix_profile(&values, Some(10), None);
5913
5914 assert_eq!(result.profile.len(), n - 10 + 1);
5915
5916 assert!(result.confidence <= 1.0, "Confidence should be <= 1.0");
5919 }
5920
5921 #[test]
5922 fn test_matrix_profile_fdata() {
5923 let n = 3;
5924 let m = 200;
5925 let period = 20.0;
5926 let mut data = vec![0.0; n * m];
5927 for i in 0..n {
5928 let amp = (i + 1) as f64;
5929 for j in 0..m {
5930 data[i + j * n] = amp * (2.0 * PI * j as f64 / period).sin();
5931 }
5932 }
5933
5934 let result = matrix_profile_fdata(&data, n, m, Some(15), None);
5935
5936 assert!(!result.profile.is_empty());
5937 assert!(result.profile_index.len() == result.profile.len());
5938 }
5939
5940 #[test]
5941 fn test_matrix_profile_seasonality() {
5942 let period = 20;
5943 let n = period * 10;
5944 let values: Vec<f64> = (0..n)
5946 .map(|i| (2.0 * PI * i as f64 / period as f64).sin())
5947 .collect();
5948
5949 let (is_seasonal, det_period, confidence) =
5950 matrix_profile_seasonality(&values, Some(15), Some(0.05));
5951
5952 assert!(
5953 is_seasonal,
5954 "Periodic signal should be detected as seasonal"
5955 );
5956 assert!(det_period > 0.0, "Detected period should be positive");
5957 assert!(confidence >= 0.05, "Confidence should be above threshold");
5958
5959 let weak_values: Vec<f64> = (0..n).map(|i| i as f64 * 0.001).collect();
5961 let (is_weak_seasonal, _, _) =
5962 matrix_profile_seasonality(&weak_values, Some(15), Some(0.5));
5963 let _ = is_weak_seasonal; }
5965
5966 #[test]
5971 fn test_lomb_scargle_regular() {
5972 let n = 200;
5973 let true_period = 5.0;
5974 let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
5975 let values: Vec<f64> = times
5976 .iter()
5977 .map(|&t| (2.0 * PI * t / true_period).sin())
5978 .collect();
5979
5980 let result = lomb_scargle(×, &values, None, None, None);
5981
5982 assert!(
5983 (result.peak_period - true_period).abs() < 0.5,
5984 "Expected peak_period ≈ {}, got {}",
5985 true_period,
5986 result.peak_period
5987 );
5988 assert!(
5989 result.false_alarm_probability < 0.05,
5990 "FAP should be low for strong signal: {}",
5991 result.false_alarm_probability
5992 );
5993 assert!(result.peak_power > 0.0, "Peak power should be positive");
5994 assert!(!result.frequencies.is_empty());
5995 assert_eq!(result.frequencies.len(), result.power.len());
5996 assert_eq!(result.frequencies.len(), result.periods.len());
5997 }
5998
5999 #[test]
6000 fn test_lomb_scargle_irregular() {
6001 let true_period = 5.0;
6002 let all_times: Vec<f64> = (0..300).map(|i| i as f64 * 0.1).collect();
6004 let times: Vec<f64> = all_times
6006 .iter()
6007 .enumerate()
6008 .filter(|(i, _)| i % 2 == 0 || i % 3 == 0)
6009 .map(|(_, &t)| t)
6010 .collect();
6011 let values: Vec<f64> = times
6012 .iter()
6013 .map(|&t| (2.0 * PI * t / true_period).sin())
6014 .collect();
6015
6016 let result = lomb_scargle(×, &values, None, None, None);
6017
6018 assert!(
6019 (result.peak_period - true_period).abs() < 1.0,
6020 "Irregular LS: expected period ≈ {}, got {}",
6021 true_period,
6022 result.peak_period
6023 );
6024 }
6025
6026 #[test]
6027 fn test_lomb_scargle_custom_frequencies() {
6028 let n = 100;
6029 let true_period = 5.0;
6030 let times: Vec<f64> = (0..n).map(|i| i as f64 * 0.1).collect();
6031 let values: Vec<f64> = times
6032 .iter()
6033 .map(|&t| (2.0 * PI * t / true_period).sin())
6034 .collect();
6035
6036 let frequencies: Vec<f64> = (1..50).map(|i| i as f64 * 0.01).collect();
6038 let result = lomb_scargle(×, &values, Some(&frequencies), None, None);
6039
6040 assert_eq!(result.frequencies.len(), frequencies.len());
6041 assert_eq!(result.power.len(), frequencies.len());
6042 assert!(result.peak_power > 0.0);
6043 }
6044
6045 #[test]
6046 fn test_lomb_scargle_fdata() {
6047 let n = 5;
6048 let m = 200;
6049 let period = 5.0;
6050 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6051 let data = generate_sine(n, m, period, &argvals);
6052
6053 let result = lomb_scargle_fdata(&data, n, m, &argvals, None, None);
6054
6055 assert!(
6056 (result.peak_period - period).abs() < 0.5,
6057 "Fdata LS: expected period ≈ {}, got {}",
6058 period,
6059 result.peak_period
6060 );
6061 assert!(!result.frequencies.is_empty());
6062 }
6063
6064 #[test]
6069 fn test_detect_seasonality_changes_onset() {
6070 let m = 400;
6071 let period = 2.0;
6072 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect(); let data: Vec<f64> = argvals
6076 .iter()
6077 .map(|&t| {
6078 if t < 10.0 {
6079 0.05 * ((t * 13.0).sin() + (t * 7.0).cos())
6081 } else {
6082 (2.0 * PI * t / period).sin()
6084 }
6085 })
6086 .collect();
6087
6088 let result = detect_seasonality_changes(&data, 1, m, &argvals, period, 0.3, 4.0, 2.0);
6089
6090 assert!(
6091 !result.strength_curve.is_empty(),
6092 "Strength curve should not be empty"
6093 );
6094 assert_eq!(result.strength_curve.len(), m);
6095
6096 if !result.change_points.is_empty() {
6098 let onset_points: Vec<_> = result
6099 .change_points
6100 .iter()
6101 .filter(|cp| cp.change_type == ChangeType::Onset)
6102 .collect();
6103 assert!(!onset_points.is_empty(), "Should detect Onset change point");
6105 }
6106 }
6107
6108 #[test]
6109 fn test_detect_seasonality_changes_no_change() {
6110 let m = 400;
6111 let period = 2.0;
6112 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.05).collect();
6113
6114 let data: Vec<f64> = argvals
6116 .iter()
6117 .map(|&t| (2.0 * PI * t / period).sin())
6118 .collect();
6119
6120 let result = detect_seasonality_changes(&data, 1, m, &argvals, period, 0.3, 4.0, 2.0);
6121
6122 assert!(!result.strength_curve.is_empty());
6123 let cessation_points: Vec<_> = result
6125 .change_points
6126 .iter()
6127 .filter(|cp| cp.change_type == ChangeType::Cessation)
6128 .collect();
6129 assert!(
6130 cessation_points.is_empty(),
6131 "Consistently seasonal signal should have no Cessation points, found {}",
6132 cessation_points.len()
6133 );
6134 }
6135
6136 #[test]
6141 fn test_estimate_period_acf() {
6142 let m = 200;
6143 let period = 2.0;
6144 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6145 let data = generate_sine(1, m, period, &argvals);
6146
6147 let estimate = estimate_period_acf(&data, 1, m, &argvals, m / 2);
6148
6149 assert!(
6150 estimate.period.is_finite(),
6151 "ACF period estimate should be finite"
6152 );
6153 assert!(
6154 (estimate.period - period).abs() < 0.5,
6155 "ACF expected period ≈ {}, got {}",
6156 period,
6157 estimate.period
6158 );
6159 assert!(
6160 estimate.confidence > 0.0,
6161 "ACF confidence should be positive"
6162 );
6163 }
6164
6165 #[test]
6166 fn test_estimate_period_regression() {
6167 let m = 200;
6168 let period = 2.0;
6169 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6170 let data = generate_sine(1, m, period, &argvals);
6171
6172 let estimate = estimate_period_regression(&data, 1, m, &argvals, 1.5, 3.0, 100, 3);
6173
6174 assert!(
6175 estimate.period.is_finite(),
6176 "Regression period estimate should be finite"
6177 );
6178 assert!(
6179 (estimate.period - period).abs() < 0.5,
6180 "Regression expected period ≈ {}, got {}",
6181 period,
6182 estimate.period
6183 );
6184 assert!(
6185 estimate.confidence > 0.0,
6186 "Regression confidence should be positive"
6187 );
6188 }
6189
6190 #[test]
6195 fn test_seasonal_strength_windowed_variance() {
6196 let m = 200;
6197 let period = 2.0;
6198 let argvals: Vec<f64> = (0..m).map(|i| i as f64 * 0.1).collect();
6199 let data = generate_sine(1, m, period, &argvals);
6200
6201 let strengths = seasonal_strength_windowed(
6202 &data,
6203 1,
6204 m,
6205 &argvals,
6206 period,
6207 4.0, StrengthMethod::Variance,
6209 );
6210
6211 assert_eq!(strengths.len(), m, "Should return m values");
6212
6213 let interior_start = m / 4;
6215 let interior_end = 3 * m / 4;
6216 for i in interior_start..interior_end {
6217 let s = strengths[i];
6218 if s.is_finite() {
6219 assert!(
6220 (-0.01..=1.01).contains(&s),
6221 "Windowed strength at {} should be near [0,1], got {}",
6222 i,
6223 s
6224 );
6225 }
6226 }
6227 }
6228}